From f80b650390457e09e36e825784b12dceb5b5ab70 Mon Sep 17 00:00:00 2001 From: Manika Malhotra Date: Mon, 27 Apr 2026 20:25:09 +0530 Subject: [PATCH 01/19] chore(onboarding): add open-source tooling to interest in signoz option (#11083) --- .../AboutSigNozQuestions/AboutSigNozQuestions.tsx | 1 + 1 file changed, 1 insertion(+) diff --git a/frontend/src/container/OnboardingQuestionaire/AboutSigNozQuestions/AboutSigNozQuestions.tsx b/frontend/src/container/OnboardingQuestionaire/AboutSigNozQuestions/AboutSigNozQuestions.tsx index 5d0998487c..8f5c5cfebb 100644 --- a/frontend/src/container/OnboardingQuestionaire/AboutSigNozQuestions/AboutSigNozQuestions.tsx +++ b/frontend/src/container/OnboardingQuestionaire/AboutSigNozQuestions/AboutSigNozQuestions.tsx @@ -27,6 +27,7 @@ const interestedInOptions: Record = { singleTool: 'Single Tool (logs, metrics & traces) to reduce operational overhead', correlateSignals: 'Correlate signals for faster troubleshooting', + openSourceTooling: 'Prefer open-source tooling', }; export function AboutSigNozQuestions({ From d1c9864f52d765e0c3abb08556615e44fe1ce737 Mon Sep 17 00:00:00 2001 From: Prakhar Dewan <33227141+prakha@users.noreply.github.com> Date: Tue, 28 Apr 2026 00:53:12 +0530 Subject: [PATCH 02/19] chore: remove unused files (#11033) * chore: remove unused files * chore: add knip.json file --- frontend/knip.json | 5 ++ .../src/pages/AlertChannelCreate/config.tsx | 22 ----- .../src/pages/AlertChannelCreate/index.tsx | 19 ---- .../AlertHeader/AlertStatus/AlertStatus.tsx | 54 ------------ .../AlertHeader/AlertStatus/types.ts | 18 ---- frontend/src/pages/AlertHistory/index.tsx | 3 - frontend/src/pages/AllErrors/utils.tsx | 86 ------------------- frontend/src/pages/Billing/BillingPage.tsx | 13 --- frontend/src/pages/Billing/index.tsx | 3 - frontend/src/pages/CreateAlert/styles.ts | 18 ---- frontend/src/pages/HomePage/index.tsx | 3 - frontend/src/pages/LogsExplorer/styles.ts | 14 --- frontend/src/pages/LogsModulePage/utils.ts | 7 -- 
frontend/src/pages/MySettings/index.tsx | 6 -- .../src/pages/SomethingWentWrong/index.tsx | 25 ------ frontend/src/pages/TracesExplorer/styles.ts | 10 --- 16 files changed, 5 insertions(+), 301 deletions(-) create mode 100644 frontend/knip.json delete mode 100644 frontend/src/pages/AlertChannelCreate/config.tsx delete mode 100644 frontend/src/pages/AlertChannelCreate/index.tsx delete mode 100644 frontend/src/pages/AlertDetails/AlertHeader/AlertStatus/AlertStatus.tsx delete mode 100644 frontend/src/pages/AlertDetails/AlertHeader/AlertStatus/types.ts delete mode 100644 frontend/src/pages/AlertHistory/index.tsx delete mode 100644 frontend/src/pages/AllErrors/utils.tsx delete mode 100644 frontend/src/pages/Billing/BillingPage.tsx delete mode 100644 frontend/src/pages/Billing/index.tsx delete mode 100644 frontend/src/pages/CreateAlert/styles.ts delete mode 100644 frontend/src/pages/HomePage/index.tsx delete mode 100644 frontend/src/pages/LogsExplorer/styles.ts delete mode 100644 frontend/src/pages/LogsModulePage/utils.ts delete mode 100644 frontend/src/pages/MySettings/index.tsx delete mode 100644 frontend/src/pages/SomethingWentWrong/index.tsx delete mode 100644 frontend/src/pages/TracesExplorer/styles.ts diff --git a/frontend/knip.json b/frontend/knip.json new file mode 100644 index 0000000000..3641807b5c --- /dev/null +++ b/frontend/knip.json @@ -0,0 +1,5 @@ +{ + "$schema": "https://unpkg.com/knip@5/schema.json", + "project": ["src/**/*.ts", "src/**/*.tsx"], + "ignore": ["src/api/generated/**/*.ts"] +} \ No newline at end of file diff --git a/frontend/src/pages/AlertChannelCreate/config.tsx b/frontend/src/pages/AlertChannelCreate/config.tsx deleted file mode 100644 index ff366ca5a8..0000000000 --- a/frontend/src/pages/AlertChannelCreate/config.tsx +++ /dev/null @@ -1,22 +0,0 @@ -import ROUTES from 'constants/routes'; -import CreateAlertChannels from 'container/CreateAlertChannels'; -import { ChannelType } from 'container/CreateAlertChannels/config'; -import 
GeneralSettings from 'container/GeneralSettings'; -import { t } from 'i18next'; - -export const alertsRoutesConfig = [ - { - Component: GeneralSettings, - name: t('routes.general'), - route: ROUTES.SETTINGS, - key: ROUTES.SETTINGS, - }, - { - Component: (): JSX.Element => ( - - ), - name: t('routes.alert_channels'), - route: ROUTES.CHANNELS_NEW, - key: ROUTES.CHANNELS_NEW, - }, -]; diff --git a/frontend/src/pages/AlertChannelCreate/index.tsx b/frontend/src/pages/AlertChannelCreate/index.tsx deleted file mode 100644 index 79944a0920..0000000000 --- a/frontend/src/pages/AlertChannelCreate/index.tsx +++ /dev/null @@ -1,19 +0,0 @@ -import { useLocation } from 'react-router-dom'; -import RouteTab from 'components/RouteTab'; -import history from 'lib/history'; - -import { alertsRoutesConfig } from './config'; - -function SettingsPage(): JSX.Element { - const { pathname } = useLocation(); - - return ( - - ); -} - -export default SettingsPage; diff --git a/frontend/src/pages/AlertDetails/AlertHeader/AlertStatus/AlertStatus.tsx b/frontend/src/pages/AlertDetails/AlertHeader/AlertStatus/AlertStatus.tsx deleted file mode 100644 index 9bd5c8ab11..0000000000 --- a/frontend/src/pages/AlertDetails/AlertHeader/AlertStatus/AlertStatus.tsx +++ /dev/null @@ -1,54 +0,0 @@ -import { useMemo } from 'react'; -import { Color } from '@signozhq/design-tokens'; -import { CircleCheck, Siren } from 'lucide-react'; -import { getDurationFromNow } from 'utils/timeUtils'; - -import { AlertStatusProps, StatusConfig } from './types'; - -import './AlertStatus.styles.scss'; - -export default function AlertStatus({ - status, - timestamp, -}: AlertStatusProps): JSX.Element { - const statusConfig: StatusConfig = useMemo( - () => ({ - firing: { - icon: , - text: 'Firing since', - extraInfo: timestamp ? ( - <> -
-
{getDurationFromNow(timestamp)}
- - ) : null, - className: 'alert-status-info--firing', - }, - resolved: { - icon: ( - - ), - text: 'Resolved', - extraInfo: null, - className: 'alert-status-info--resolved', - }, - }), - [timestamp], - ); - - const currentStatus = statusConfig[status]; - - return ( -
-
{currentStatus.icon}
-
-
{currentStatus.text}
- {currentStatus.extraInfo} -
-
- ); -} diff --git a/frontend/src/pages/AlertDetails/AlertHeader/AlertStatus/types.ts b/frontend/src/pages/AlertDetails/AlertHeader/AlertStatus/types.ts deleted file mode 100644 index c297480f38..0000000000 --- a/frontend/src/pages/AlertDetails/AlertHeader/AlertStatus/types.ts +++ /dev/null @@ -1,18 +0,0 @@ -export type AlertStatusProps = - | { status: 'firing'; timestamp: number } - | { status: 'resolved'; timestamp?: number }; - -export type StatusConfig = { - firing: { - icon: JSX.Element; - text: string; - extraInfo: JSX.Element | null; - className: string; - }; - resolved: { - icon: JSX.Element; - text: string; - extraInfo: JSX.Element | null; - className: string; - }; -}; diff --git a/frontend/src/pages/AlertHistory/index.tsx b/frontend/src/pages/AlertHistory/index.tsx deleted file mode 100644 index 7a7b0d01d8..0000000000 --- a/frontend/src/pages/AlertHistory/index.tsx +++ /dev/null @@ -1,3 +0,0 @@ -import AlertHistory from 'container/AlertHistory'; - -export default AlertHistory; diff --git a/frontend/src/pages/AllErrors/utils.tsx b/frontend/src/pages/AllErrors/utils.tsx deleted file mode 100644 index bde4354ec1..0000000000 --- a/frontend/src/pages/AllErrors/utils.tsx +++ /dev/null @@ -1,86 +0,0 @@ -import { - FiltersType, - IQuickFiltersConfig, -} from 'components/QuickFilters/types'; -import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse'; -import { DataSource } from 'types/common/queryBuilder'; - -export const ExceptionsQuickFiltersConfig: IQuickFiltersConfig[] = [ - { - type: FiltersType.CHECKBOX, - title: 'Environment', - dataSource: DataSource.TRACES, - attributeKey: { - key: 'deployment.environment', - dataType: DataTypes.String, - type: 'resource', - }, - defaultOpen: true, - }, - { - type: FiltersType.CHECKBOX, - title: 'Service Name', - dataSource: DataSource.TRACES, - attributeKey: { - key: 'service.name', - dataType: DataTypes.String, - type: 'resource', - }, - defaultOpen: false, - }, - { - type: FiltersType.CHECKBOX, - 
title: 'Hostname', - dataSource: DataSource.TRACES, - attributeKey: { - key: 'host.name', - dataType: DataTypes.String, - type: 'resource', - }, - defaultOpen: false, - }, - { - type: FiltersType.CHECKBOX, - title: 'K8s Cluster Name', - dataSource: DataSource.TRACES, - attributeKey: { - key: 'k8s.cluster.name', - dataType: DataTypes.String, - type: 'resource', - }, - defaultOpen: false, - }, - { - type: FiltersType.CHECKBOX, - title: 'K8s Deployment Name', - dataSource: DataSource.TRACES, - attributeKey: { - key: 'k8s.deployment.name', - dataType: DataTypes.String, - type: 'resource', - }, - defaultOpen: false, - }, - { - type: FiltersType.CHECKBOX, - title: 'K8s Namespace Name', - dataSource: DataSource.TRACES, - attributeKey: { - key: 'k8s.namespace.name', - dataType: DataTypes.String, - type: 'resource', - }, - defaultOpen: false, - }, - { - type: FiltersType.CHECKBOX, - title: 'K8s Pod Name', - dataSource: DataSource.TRACES, - attributeKey: { - key: 'k8s.pod.name', - dataType: DataTypes.String, - type: 'resource', - }, - defaultOpen: false, - }, -]; diff --git a/frontend/src/pages/Billing/BillingPage.tsx b/frontend/src/pages/Billing/BillingPage.tsx deleted file mode 100644 index 7d1f43102a..0000000000 --- a/frontend/src/pages/Billing/BillingPage.tsx +++ /dev/null @@ -1,13 +0,0 @@ -import BillingContainer from 'container/BillingContainer/BillingContainer'; - -import './BillingPage.styles.scss'; - -function BillingPage(): JSX.Element { - return ( -
- -
- ); -} - -export default BillingPage; diff --git a/frontend/src/pages/Billing/index.tsx b/frontend/src/pages/Billing/index.tsx deleted file mode 100644 index 8dad400fe0..0000000000 --- a/frontend/src/pages/Billing/index.tsx +++ /dev/null @@ -1,3 +0,0 @@ -import BillingPage from './BillingPage'; - -export default BillingPage; diff --git a/frontend/src/pages/CreateAlert/styles.ts b/frontend/src/pages/CreateAlert/styles.ts deleted file mode 100644 index 9b66364b49..0000000000 --- a/frontend/src/pages/CreateAlert/styles.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { Typography } from 'antd'; -import styled from 'styled-components'; - -export const Title = styled(Typography)` - &&& { - margin-top: 1rem; - margin-bottom: 1rem; - } -`; - -export const ButtonContainer = styled.div` - &&& { - display: flex; - justify-content: flex-end; - align-items: center; - margin-top: 1rem; - } -`; diff --git a/frontend/src/pages/HomePage/index.tsx b/frontend/src/pages/HomePage/index.tsx deleted file mode 100644 index d91fb083d2..0000000000 --- a/frontend/src/pages/HomePage/index.tsx +++ /dev/null @@ -1,3 +0,0 @@ -import HomePage from './HomePage'; - -export default HomePage; diff --git a/frontend/src/pages/LogsExplorer/styles.ts b/frontend/src/pages/LogsExplorer/styles.ts deleted file mode 100644 index 54d553bc3c..0000000000 --- a/frontend/src/pages/LogsExplorer/styles.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { Col } from 'antd'; -import { themeColors } from 'constants/theme'; -import styled from 'styled-components'; - -export const WrapperStyled = styled.div` - display: flex; - flex-direction: column; - flex: 1; - color: ${themeColors.lightWhite}; -`; - -export const ButtonWrapperStyled = styled(Col)` - margin-left: auto; -`; diff --git a/frontend/src/pages/LogsModulePage/utils.ts b/frontend/src/pages/LogsModulePage/utils.ts deleted file mode 100644 index 222c1c9ef6..0000000000 --- a/frontend/src/pages/LogsModulePage/utils.ts +++ /dev/null @@ -1,7 +0,0 @@ -export const 
removeSourcePageFromPath = (path: string): string => { - const lastSlashIndex = path.lastIndexOf('/'); - if (lastSlashIndex !== -1) { - return path.substring(0, lastSlashIndex); - } - return path; -}; diff --git a/frontend/src/pages/MySettings/index.tsx b/frontend/src/pages/MySettings/index.tsx deleted file mode 100644 index f3f821e466..0000000000 --- a/frontend/src/pages/MySettings/index.tsx +++ /dev/null @@ -1,6 +0,0 @@ -import MySettingsContainer from 'container/MySettings'; - -function MySettings(): JSX.Element { - return ; -} -export default MySettings; diff --git a/frontend/src/pages/SomethingWentWrong/index.tsx b/frontend/src/pages/SomethingWentWrong/index.tsx deleted file mode 100644 index cfd085254b..0000000000 --- a/frontend/src/pages/SomethingWentWrong/index.tsx +++ /dev/null @@ -1,25 +0,0 @@ -import { Button, Typography } from 'antd'; -import SomethingWentWrongAsset from 'assets/SomethingWentWrong'; -import { Container } from 'components/NotFound/styles'; -import ROUTES from 'constants/routes'; -import history from 'lib/history'; - -function SomethingWentWrong(): JSX.Element { - return ( - - - Oops! 
Something went wrong - - - ); -} - -export default SomethingWentWrong; diff --git a/frontend/src/pages/TracesExplorer/styles.ts b/frontend/src/pages/TracesExplorer/styles.ts deleted file mode 100644 index 9e68ad5a5c..0000000000 --- a/frontend/src/pages/TracesExplorer/styles.ts +++ /dev/null @@ -1,10 +0,0 @@ -import styled from 'styled-components'; - -export const Container = styled.div` - margin: 1rem 0; -`; - -export const ActionsWrapper = styled.div` - display: flex; - justify-content: flex-end; -`; From 500ce85ccbbe43fe71ffaa677e07498a634e1941 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Vinicius=20Louren=C3=A7o?= <12551007+H4ad@users.noreply.github.com> Date: Tue, 28 Apr 2026 01:27:45 -0300 Subject: [PATCH 03/19] chore(oxlint): run fixes (#11123) * chore(oxlint): run fixes * test(to-strict): failing due to missing props --- frontend/package.json | 13 +- .../__tests__/getFieldKeys.test.ts | 2 +- .../__tests__/getFieldValues.test.ts | 2 +- frontend/src/api/interceptors.test.ts | 10 +- .../v5/queryRange/convertV5Response.test.ts | 10 +- .../prepareQueryRangePayloadV5.test.ts | 51 +- .../DownloadOptionsMenu.test.tsx | 4 +- .../DraggableTableRow/tests/utils.test.ts | 16 +- .../__tests__/EditMemberDrawer.test.tsx | 12 +- .../src/components/Graph/Plugin/Legend.ts | 2 +- .../Graph/__tests__/xAxisConfig.test.ts | 48 +- .../Graph/__tests__/yAxisConfig.test.ts | 46 +- .../__tests__/InviteMembersModal.test.tsx | 18 +- .../Logs/LogStateIndicator/utils.test.ts | 2 +- .../LogsFormatOptionsMenu.tsx | 2 +- .../MarkdownRenderer/MarkdownRenderer.tsx | 4 +- .../CustomMultiSelect.comprehensive.test.tsx | 90 ++-- .../CustomSelect.comprehensive.test.tsx | 74 +-- .../VariableItem.integration.test.tsx | 26 +- .../OverflowInputToolTip.test.tsx | 12 +- .../traceOperatorContextUtils.test.ts | 10 +- .../TraceOperator/__tests__/utils.test.ts | 8 +- .../__tests__/previousQuery.utils.test.ts | 2 +- .../QueryBuilderV2/__tests__/utils.test.ts | 66 +-- .../Checkbox/Checkbox.test.tsx | 2 +- 
.../QuickFilters/tests/QuickFilters.test.tsx | 26 +- .../src/components/RouteTab/RouteTab.test.tsx | 12 +- .../__tests__/EditKeyModal.test.tsx | 14 +- .../__tests__/ServiceAccountDrawer.test.tsx | 30 +- .../__tests__/TanStackCustomTableRow.test.tsx | 4 +- .../__tests__/TanStackRow.test.tsx | 2 +- .../__tests__/useColumnState.test.tsx | 31 +- .../__tests__/useColumnStore.test.ts | 40 +- .../__tests__/useTableParams.test.tsx | 24 +- .../__tests__/formatter.test.tsx | 70 +-- .../__tests__/utils.test.tsx | 6 +- .../cmdKPalette/__test__/cmdkPalette.test.tsx | 12 +- .../__tests__/AlertChannels.test.tsx | 6 +- .../AlertChannelsNormalUser.test.tsx | 4 +- .../__tests__/CreateAlertChannel.test.tsx | 26 +- .../CreateAlertChannelNormalUser.test.tsx | 20 +- .../__tests__/EditAlertChannel.test.tsx | 14 +- frontend/src/container/AllError/utils.test.ts | 20 +- .../AnomalyAlertEvaluationView.tsx | 8 +- .../components/DomainMetrics.test.tsx | 4 +- .../components/EndPointMetrics.test.tsx | 4 +- .../__tests__/APIMonitoringUtils.test.tsx | 36 +- .../AllEndpointsWidgetV5Migration.test.tsx | 12 +- .../EndpointDropdownV5Migration.test.tsx | 2 +- .../__tests__/queryParams.test.tsx | 4 +- .../BillingContainer.test.tsx | 34 +- .../__tests__/utils.test.ts | 34 +- .../__tests__/QuerySection.test.tsx | 10 +- .../context/__tests__/utils.test.tsx | 76 +-- .../__tests__/CustomDomainSettings.test.tsx | 16 +- .../VariableItem/VariableItem.test.tsx | 48 +- .../DashboardVariableSettings/index.tsx | 6 +- .../__tests__/PublicDashboard.test.tsx | 38 +- .../VariableItem.test.tsx | 12 +- .../DashboardVariableSelection.test.tsx | 2 +- .../VariableItem.defaulting.behavior.test.tsx | 16 +- .../__test__/dashboardVariables.test.tsx | 40 +- .../DashboardVariablesSelection/util.ts | 2 +- .../DashboardVariablesSelection/utils.test.ts | 40 +- .../__test__/PanelManagement.test.tsx | 2 +- .../charts/utils/__tests__/stackUtils.test.ts | 34 +- .../__tests__/useBarChartStacking.test.ts | 16 +- 
.../TimeSeriesPanel/__tests__/utils.test.ts | 12 +- .../__tests__/legendVisibilityUtils.test.ts | 20 +- .../__tests__/ForgotPassword.test.tsx | 34 +- .../usePrefillAlertConditions.test.ts | 4 +- .../container/FormAlertRules/utils.test.ts | 4 +- .../__tests__/utils.test.tsx | 2 +- .../EntityLogs/__tests__/EntityLogs.test.tsx | 4 +- .../__tests__/ServiceDetailsS3Sync.test.tsx | 2 +- .../ListAlertRules/__test__/utils.test.ts | 4 +- .../__tests__/ContextLogRenderer.test.tsx | 12 +- .../container/LogDetailedView/util.test.ts | 28 +- .../src/container/LogDetailedView/utils.tsx | 2 +- .../LogExplorerQuerySection.test.tsx | 2 +- .../__tests__/useInitialQuery.test.ts | 2 +- .../tests/LogsExplorerPagination.test.tsx | 14 +- .../tests/LogsExplorerViews.test.tsx | 4 +- .../MembersSettings.integration.test.tsx | 6 +- .../__tests__/TopOperationsTable.test.tsx | 4 +- .../MetricsApplication/utils.test.ts | 16 +- .../Explorer/__tests__/TimeSeries.test.tsx | 10 +- .../Explorer/__tests__/utils.test.tsx | 12 +- .../__test__/ContextLinks.test.tsx | 2 +- .../NewWidget/__test__/NewWidget.test.tsx | 20 +- .../__test__/getUplotChartData.test.ts | 8 +- .../NewWidget/__test__/utils.test.ts | 4 +- .../Steps/MarkdownStep/MarkdownStep.tsx | 4 +- .../utils/dataSourceUtils.ts | 2 +- .../__tests__/OnboardingQuestionaire.test.tsx | 88 ++-- .../AuthDomain/__tests__/CreateEdit.test.tsx | 12 +- .../tests/ChangeHistory.test.tsx | 6 +- .../FormFields/FilterInput/index.tsx | 2 +- .../tests/PipelineListsView.test.tsx | 31 +- .../PipelinePage/tests/utils.test.ts | 42 +- .../RunQueryBtn/__test__/RunQueryBtn.test.tsx | 14 +- .../__test__/GroupByFilter.test.tsx | 12 +- .../HavingFilter/__tests__/utils.test.tsx | 14 +- .../MetricNameSelector.test.tsx | 31 +- .../__test__/SpanScopeSelector.test.tsx | 30 +- .../Drilldown/__tests__/Breakout.test.tsx | 6 +- .../__tests__/TableDrilldown.test.tsx | 10 +- .../QueryTable/__test__/QueryTable.test.tsx | 4 +- .../__tests__/RoleDetailsPage.test.tsx | 24 +- 
.../__tests__/RolesSettings.test.tsx | 32 +- .../RolesSettings/__tests__/utils.test.ts | 34 +- .../__tests__/useRoutingPolicies.test.tsx | 2 +- ...rviceAccountsSettings.integration.test.tsx | 12 +- .../ServiceMetrics/ServiceMetrics.test.tsx | 6 +- .../ServiceTraces/ServicTraces.test.tsx | 6 +- frontend/src/container/SideNav/helper.test.ts | 6 +- frontend/src/container/Trace/Graph/config.ts | 2 +- .../Trace/TraceGraphFilter/utils.test.ts | 4 +- .../src/container/TraceDetail/utils.test.ts | 10 +- .../__tests__/TraceFlameGraph.test.tsx | 2 +- .../__tests__/NoFilterTable.test.tsx | 4 +- .../TriggeredAlerts/__tests__/utils.test.tsx | 8 +- .../hooks/__tests__/useUrlQueryData.test.tsx | 50 +- .../useDashboardsListQueryParams.test.ts | 8 +- .../useTransformDashboardVariables.test.tsx | 4 +- .../__test__/useVariablesFromUrl.test.tsx | 26 +- .../hooks/queryBuilder/useIsValidTag.test.ts | 8 +- frontend/src/hooks/useAuthZ/useAuthZ.test.tsx | 22 +- .../src/hooks/useComponentPermission.test.ts | 6 +- frontend/src/hooks/useGetQueryLabels.test.ts | 12 +- frontend/src/hooks/useInterval.test.ts | 8 +- frontend/src/hooks/usePreviousValue.test.tsx | 4 +- .../__tests__/whitelistedKeys.test.ts | 8 +- frontend/src/hooks/useUrlQuery.test.tsx | 12 +- frontend/src/lib/__tests__/getStep.test.ts | 16 +- .../src/lib/__tests__/logql/parser.test.ts | 4 +- .../lib/__tests__/logql/reverseParser.test.ts | 6 +- .../src/lib/__tests__/logql/splitter.test.ts | 4 +- .../variableReference.test.ts | 41 +- frontend/src/lib/getRandomColor.test.ts | 8 +- frontend/src/lib/getStep.test.ts | 16 +- .../chooseAutocompleteFromCustomValue.test.ts | 18 +- .../src/lib/uPlotLib/plugins/onClickPlugin.ts | 2 +- .../src/lib/uPlotLib/plugins/tooltipPlugin.ts | 2 +- .../lib/uPlotLib/utils/getYAxisScale.test.ts | 22 +- .../utils/tests/getSeriesData.test.ts | 12 +- .../utils/tests/getUplotChartOptions.test.ts | 30 +- .../Tooltip/__tests__/utils.test.ts | 2 +- .../components/__tests__/UPlotChart.test.tsx | 12 +- 
.../config/__tests__/UPlotAxisBuilder.test.ts | 21 +- .../__tests__/UPlotConfigBuilder.test.ts | 22 +- .../__tests__/UPlotScaleBuilder.test.ts | 4 +- .../__tests__/UPlotSeriesBuilder.test.ts | 2 +- .../context/__tests__/PlotContext.test.tsx | 4 +- .../hooks/__tests__/useLegendsSync.test.ts | 4 +- .../uPlotV2/utils/__tests__/dataUtils.test.ts | 34 +- .../lib/uPlotV2/utils/__tests__/scale.test.ts | 14 +- .../__tests__/seriesPointsFilter.test.ts | 8 +- .../uPlotV2/utils/__tests__/threshold.test.ts | 2 +- .../__tests__/LogsExplorer.test.tsx | 10 +- .../pages/SaveView/__test__/SaveView.test.tsx | 12 +- .../pages/ServiceTopLevelOperations/index.tsx | 2 +- frontend/src/pages/Services/Metrics.test.tsx | 2 +- .../pages/SignUp/__tests__/SignUp.test.tsx | 12 +- .../__test__/TracesExplorer.test.tsx | 48 +- .../WorkspaceLocked/WorkspaceLocked.test.tsx | 6 +- .../Dashboard/__tests__/Dashboard.test.tsx | 9 +- .../__tests__/normalizeUrlValue.test.ts | 36 +- .../__tests__/variableFetchStore.test.ts | 8 +- .../__tests__/dashboardVariablesStore.test.ts | 16 +- .../dashboardVariablesStoreUtils.test.ts | 48 +- .../PreferencesProvider.integration.test.tsx | 4 +- .../__tests__/logsLoaderConfig.test.ts | 22 +- .../__tests__/logsUpdaterConfig.test.ts | 4 +- .../__tests__/tracesLoaderConfig.test.ts | 28 +- .../__tests__/usePreferenceLoader.test.tsx | 6 +- .../store/globalTime/__tests__/utils.test.ts | 6 +- .../src/tests/mapQueryDataFromApi.test.ts | 12 +- .../utils/__tests__/queryContextUtils.test.ts | 72 +-- .../utils/__tests__/sanitizeOrderBy.test.ts | 6 +- .../src/utils/__tests__/spanToTree.test.ts | 12 +- frontend/src/utils/timeUtils.ts | 2 +- frontend/tsconfig.json | 1 - frontend/yarn.lock | 464 +++++++++--------- 184 files changed, 1768 insertions(+), 1639 deletions(-) diff --git a/frontend/package.json b/frontend/package.json index 6b340d8e39..1ad522a8c8 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -212,9 +212,9 @@ "msw": "1.3.2", "npm-run-all": "latest", 
"orval": "7.18.0", - "oxfmt": "0.46.0", - "oxlint": "1.61.0", - "oxlint-tsgolint": "0.21.1", + "oxfmt": "0.47.0", + "oxlint": "1.62.0", + "oxlint-tsgolint": "0.22.1", "portfinder-sync": "^0.0.2", "postcss": "8.5.6", "postcss-scss": "4.0.9", @@ -238,9 +238,12 @@ }, "lint-staged": { "*.(js|jsx|ts|tsx)": [ - "oxfmt --check", - "oxlint --quiet", + "oxlint --fix", + "oxfmt --write", "sh scripts/typecheck-staged.sh" + ], + "*.(scss|css)": [ + "stylelint" ] }, "resolutions": { diff --git a/frontend/src/api/dynamicVariables/__tests__/getFieldKeys.test.ts b/frontend/src/api/dynamicVariables/__tests__/getFieldKeys.test.ts index a6cc5d262b..c9bc7ce928 100644 --- a/frontend/src/api/dynamicVariables/__tests__/getFieldKeys.test.ts +++ b/frontend/src/api/dynamicVariables/__tests__/getFieldKeys.test.ts @@ -104,7 +104,7 @@ describe('getFieldKeys API', () => { const result = await getFieldKeys('traces'); // Verify the returned structure matches SuccessResponseV2 format - expect(result).toEqual({ + expect(result).toStrictEqual({ httpStatusCode: 200, data: mockSuccessResponse.data.data, }); diff --git a/frontend/src/api/dynamicVariables/__tests__/getFieldValues.test.ts b/frontend/src/api/dynamicVariables/__tests__/getFieldValues.test.ts index 17bfec1bf1..042bb56429 100644 --- a/frontend/src/api/dynamicVariables/__tests__/getFieldValues.test.ts +++ b/frontend/src/api/dynamicVariables/__tests__/getFieldValues.test.ts @@ -199,7 +199,7 @@ describe('getFieldValues API', () => { const result = await getFieldValues('traces', 'service.name'); // Verify the returned structure matches SuccessResponseV2 format - expect(result).toEqual({ + expect(result).toStrictEqual({ httpStatusCode: 200, data: expect.objectContaining({ values: expect.any(Object), diff --git a/frontend/src/api/interceptors.test.ts b/frontend/src/api/interceptors.test.ts index 4ba8b2cafe..2429ab4d1b 100644 --- a/frontend/src/api/interceptors.test.ts +++ b/frontend/src/api/interceptors.test.ts @@ -76,10 +76,10 @@ 
describe('interceptorRejected', () => { } const mockAxiosFn = axios as unknown as jest.Mock; - expect(mockAxiosFn.mock.calls.length).toBe(1); + expect(mockAxiosFn.mock.calls).toHaveLength(1); const retryCallConfig = mockAxiosFn.mock.calls[0][0]; expect(Array.isArray(JSON.parse(retryCallConfig.data))).toBe(true); - expect(JSON.parse(retryCallConfig.data)).toEqual(arrayPayload); + expect(JSON.parse(retryCallConfig.data)).toStrictEqual(arrayPayload); }); it('should preserve object payload structure when retrying a 401 request', async () => { @@ -112,9 +112,9 @@ describe('interceptorRejected', () => { } const mockAxiosFn = axios as unknown as jest.Mock; - expect(mockAxiosFn.mock.calls.length).toBe(1); + expect(mockAxiosFn.mock.calls).toHaveLength(1); const retryCallConfig = mockAxiosFn.mock.calls[0][0]; - expect(JSON.parse(retryCallConfig.data)).toEqual(objectPayload); + expect(JSON.parse(retryCallConfig.data)).toStrictEqual(objectPayload); }); it('should handle undefined data gracefully when retrying', async () => { @@ -145,7 +145,7 @@ describe('interceptorRejected', () => { } const mockAxiosFn = axios as unknown as jest.Mock; - expect(mockAxiosFn.mock.calls.length).toBe(1); + expect(mockAxiosFn.mock.calls).toHaveLength(1); const retryCallConfig = mockAxiosFn.mock.calls[0][0]; expect(retryCallConfig.data).toBeUndefined(); }); diff --git a/frontend/src/api/v5/queryRange/convertV5Response.test.ts b/frontend/src/api/v5/queryRange/convertV5Response.test.ts index 9c7e0cabf2..65089e7adb 100644 --- a/frontend/src/api/v5/queryRange/convertV5Response.test.ts +++ b/frontend/src/api/v5/queryRange/convertV5Response.test.ts @@ -99,7 +99,7 @@ describe('convertV5ResponseToLegacy', () => { const q = result.payload.data.result[0]; expect(q.queryName).toBe('A'); expect(q.legend).toBe('{{service.name}}'); - expect(q.series?.[0]).toEqual( + expect(q.series?.[0]).toStrictEqual( expect.objectContaining({ labels: { 'service.name': 'adservice' }, values: [ @@ -186,7 +186,7 @@ 
describe('convertV5ResponseToLegacy', () => { expect(result.payload.data.resultType).toBe('scalar'); const [tableEntry] = result.payload.data.result; - expect(tableEntry.table?.columns).toEqual([ + expect(tableEntry.table?.columns).toStrictEqual([ { name: 'service.name', queryName: 'A', @@ -202,7 +202,7 @@ describe('convertV5ResponseToLegacy', () => { }, { name: 'F1', queryName: 'F1', isValueColumn: true, id: 'F1' }, ]); - expect(tableEntry.table?.rows?.[0]).toEqual({ + expect(tableEntry.table?.rows?.[0]).toStrictEqual({ data: { 'service.name': 'adservice', 'A.count()': 606, @@ -257,7 +257,7 @@ describe('convertV5ResponseToLegacy', () => { expect(result.payload.data.resultType).toBe('scalar'); const [tableEntry] = result.payload.data.result; - expect(tableEntry.table?.columns).toEqual([ + expect(tableEntry.table?.columns).toStrictEqual([ { name: 'service.name', queryName: 'A', @@ -267,7 +267,7 @@ describe('convertV5ResponseToLegacy', () => { // Single aggregation: name resolves to legend, id resolves to queryName { name: '{{service.name}}', queryName: 'A', isValueColumn: true, id: 'A' }, ]); - expect(tableEntry.table?.rows?.[0]).toEqual({ + expect(tableEntry.table?.rows?.[0]).toStrictEqual({ data: { 'service.name': 'adservice', A: 580, diff --git a/frontend/src/api/v5/queryRange/prepareQueryRangePayloadV5.test.ts b/frontend/src/api/v5/queryRange/prepareQueryRangePayloadV5.test.ts index 773946d8d5..59cf631846 100644 --- a/frontend/src/api/v5/queryRange/prepareQueryRangePayloadV5.test.ts +++ b/frontend/src/api/v5/queryRange/prepareQueryRangePayloadV5.test.ts @@ -104,7 +104,7 @@ describe('prepareQueryRangePayloadV5', () => { const result = prepareQueryRangePayloadV5(props); - expect(result).toEqual( + expect(result).toStrictEqual( expect.objectContaining({ legendMap: { A: 'Legend A', F1: 'Formula Legend' }, queryPayload: expect.objectContaining({ @@ -154,7 +154,10 @@ describe('prepareQueryRangePayloadV5', () => { ); // Legend map combines builder and formulas - 
expect(result.legendMap).toEqual({ A: 'Legend A', F1: 'Formula Legend' }); + expect(result.legendMap).toStrictEqual({ + A: 'Legend A', + F1: 'Formula Legend', + }); const payload: QueryRangePayloadV5 = result.queryPayload; @@ -166,10 +169,10 @@ describe('prepareQueryRangePayloadV5', () => { expect(payload.formatOptions?.fillGaps).toBe(true); // Variables mapped as { key: { value } } - expect(payload.variables).toEqual({ - svc: { value: 'api' }, - count: { value: 5 }, - flag: { value: true }, + expect(payload.variables).toStrictEqual({ + svc: { value: 'api', type: undefined }, + count: { value: 5, type: undefined }, + flag: { value: true, type: undefined }, }); // Queries include one builder_query and one builder_formula @@ -226,7 +229,7 @@ describe('prepareQueryRangePayloadV5', () => { const result = prepareQueryRangePayloadV5(props); - expect(result).toEqual( + expect(result).toStrictEqual( expect.objectContaining({ legendMap: { A: 'LP' }, queryPayload: expect.objectContaining({ @@ -255,7 +258,7 @@ describe('prepareQueryRangePayloadV5', () => { }), ); - expect(result.legendMap).toEqual({ A: 'LP' }); + expect(result.legendMap).toStrictEqual({ A: 'LP' }); const payload: QueryRangePayloadV5 = result.queryPayload; expect(payload.requestType).toBe('time_series'); @@ -296,7 +299,7 @@ describe('prepareQueryRangePayloadV5', () => { const result = prepareQueryRangePayloadV5(props); - expect(result).toEqual( + expect(result).toStrictEqual( expect.objectContaining({ legendMap: { Q: 'LC' }, queryPayload: expect.objectContaining({ @@ -324,7 +327,7 @@ describe('prepareQueryRangePayloadV5', () => { }), ); - expect(result.legendMap).toEqual({ Q: 'LC' }); + expect(result.legendMap).toStrictEqual({ Q: 'LC' }); const payload: QueryRangePayloadV5 = result.queryPayload; expect(payload.requestType).toBe('scalar'); @@ -353,7 +356,7 @@ describe('prepareQueryRangePayloadV5', () => { const result = prepareQueryRangePayloadV5(props); - expect(result).toEqual( + expect(result).toStrictEqual( 
expect.objectContaining({ legendMap: {}, queryPayload: expect.objectContaining({ @@ -397,7 +400,7 @@ describe('prepareQueryRangePayloadV5', () => { const result = prepareQueryRangePayloadV5(props); - expect(result).toEqual( + expect(result).toStrictEqual( expect.objectContaining({ legendMap: { A: 'Legend A' }, queryPayload: expect.objectContaining({ @@ -471,7 +474,7 @@ describe('prepareQueryRangePayloadV5', () => { const result = prepareQueryRangePayloadV5(props); - expect(result).toEqual( + expect(result).toStrictEqual( expect.objectContaining({ legendMap: { A: 'Legend A' }, queryPayload: expect.objectContaining({ @@ -585,7 +588,7 @@ describe('prepareQueryRangePayloadV5', () => { const result = prepareQueryRangePayloadV5(props); - expect(result).toEqual( + expect(result).toStrictEqual( expect.objectContaining({ legendMap: { A: '{{service.name}}' }, queryPayload: expect.objectContaining({ @@ -684,7 +687,7 @@ describe('prepareQueryRangePayloadV5', () => { const result = prepareQueryRangePayloadV5(props); - expect(result.legendMap).toEqual({ A: 'Legend A' }); + expect(result.legendMap).toStrictEqual({ A: 'Legend A' }); expect(result.queryPayload.compositeQuery.queries).toHaveLength(1); const builderQuery = result.queryPayload.compositeQuery.queries.find( @@ -694,7 +697,7 @@ describe('prepareQueryRangePayloadV5', () => { expect(logSpec.name).toBe('A'); expect(logSpec.signal).toBe('logs'); - expect(logSpec.filter).toEqual({ + expect(logSpec.filter).toStrictEqual({ expression: "service.name = 'payment-service' AND http.status_code >= 400 AND message contains 'error'", }); @@ -731,7 +734,9 @@ describe('prepareQueryRangePayloadV5', () => { (q) => q.type === 'builder_query', ) as QueryEnvelope; const logSpec = builderQuery.spec as LogBuilderQuery; - expect(logSpec.filter).toEqual({ expression: 'http.status_code >= 500' }); + expect(logSpec.filter).toStrictEqual({ + expression: 'http.status_code >= 500', + }); }); it('derives expression from filters when filter is 
undefined', () => { @@ -775,7 +780,9 @@ describe('prepareQueryRangePayloadV5', () => { (q) => q.type === 'builder_query', ) as QueryEnvelope; const logSpec = builderQuery.spec as LogBuilderQuery; - expect(logSpec.filter).toEqual({ expression: "service.name = 'checkout'" }); + expect(logSpec.filter).toStrictEqual({ + expression: "service.name = 'checkout'", + }); }); it('prefers filter.expression over filters when both are present', () => { @@ -819,7 +826,9 @@ describe('prepareQueryRangePayloadV5', () => { (q) => q.type === 'builder_query', ) as QueryEnvelope; const logSpec = builderQuery.spec as LogBuilderQuery; - expect(logSpec.filter).toEqual({ expression: "service.name = 'frontend'" }); + expect(logSpec.filter).toStrictEqual({ + expression: "service.name = 'frontend'", + }); }); it('returns empty expression when neither filter nor filters provided', () => { @@ -853,7 +862,7 @@ describe('prepareQueryRangePayloadV5', () => { (q) => q.type === 'builder_query', ) as QueryEnvelope; const logSpec = builderQuery.spec as LogBuilderQuery; - expect(logSpec.filter).toEqual({ expression: '' }); + expect(logSpec.filter).toStrictEqual({ expression: '' }); }); it('returns empty expression when filters provided with empty items', () => { @@ -887,6 +896,6 @@ describe('prepareQueryRangePayloadV5', () => { (q) => q.type === 'builder_query', ) as QueryEnvelope; const logSpec = builderQuery.spec as LogBuilderQuery; - expect(logSpec.filter).toEqual({ expression: '' }); + expect(logSpec.filter).toStrictEqual({ expression: '' }); }); }); diff --git a/frontend/src/components/DownloadOptionsMenu/DownloadOptionsMenu.test.tsx b/frontend/src/components/DownloadOptionsMenu/DownloadOptionsMenu.test.tsx index fa5b202362..5706190637 100644 --- a/frontend/src/components/DownloadOptionsMenu/DownloadOptionsMenu.test.tsx +++ b/frontend/src/components/DownloadOptionsMenu/DownloadOptionsMenu.test.tsx @@ -213,7 +213,7 @@ describe.each([ const callArgs = mockDownloadExportData.mock.calls[0][0]; const 
query = callArgs.body.compositeQuery.queries[0]; expect(query.spec.groupBy).toBeUndefined(); - expect(query.spec.having).toEqual({ expression: '' }); + expect(query.spec.having).toStrictEqual({ expression: '' }); }); }); @@ -238,7 +238,7 @@ describe.each([ expect(mockDownloadExportData).toHaveBeenCalledTimes(1); const callArgs = mockDownloadExportData.mock.calls[0][0]; const query = callArgs.body.compositeQuery.queries[0]; - expect(query.spec.selectFields).toEqual([ + expect(query.spec.selectFields).toStrictEqual([ expect.objectContaining({ name: 'http.status', fieldDataType: 'int64', diff --git a/frontend/src/components/DraggableTableRow/tests/utils.test.ts b/frontend/src/components/DraggableTableRow/tests/utils.test.ts index 80854944c7..66ada095c9 100644 --- a/frontend/src/components/DraggableTableRow/tests/utils.test.ts +++ b/frontend/src/components/DraggableTableRow/tests/utils.test.ts @@ -6,39 +6,39 @@ jest.mock('react-dnd', () => ({ })); describe('Utils testing of DraggableTableRow component', () => { - test('Should dropHandler return true', () => { + it('Should dropHandler return true', () => { const monitor = { isOver: jest.fn().mockReturnValueOnce(true), } as never; const dropDataTruthy = dropHandler(monitor); - expect(dropDataTruthy).toEqual({ isOver: true }); + expect(dropDataTruthy).toStrictEqual({ isOver: true }); }); - test('Should dropHandler return false', () => { + it('Should dropHandler return false', () => { const monitor = { isOver: jest.fn().mockReturnValueOnce(false), } as never; const dropDataFalsy = dropHandler(monitor); - expect(dropDataFalsy).toEqual({ isOver: false }); + expect(dropDataFalsy).toStrictEqual({ isOver: false }); }); - test('Should dragHandler return true', () => { + it('Should dragHandler return true', () => { const monitor = { isDragging: jest.fn().mockReturnValueOnce(true), } as never; const dragDataTruthy = dragHandler(monitor); - expect(dragDataTruthy).toEqual({ isDragging: true }); + 
expect(dragDataTruthy).toStrictEqual({ isDragging: true }); }); - test('Should dragHandler return false', () => { + it('Should dragHandler return false', () => { const monitor = { isDragging: jest.fn().mockReturnValueOnce(false), } as never; const dragDataFalsy = dragHandler(monitor); - expect(dragDataFalsy).toEqual({ isDragging: false }); + expect(dragDataFalsy).toStrictEqual({ isDragging: false }); }); }); diff --git a/frontend/src/components/EditMemberDrawer/__tests__/EditMemberDrawer.test.tsx b/frontend/src/components/EditMemberDrawer/__tests__/EditMemberDrawer.test.tsx index 29a5d21b2c..9eb7e279ec 100644 --- a/frontend/src/components/EditMemberDrawer/__tests__/EditMemberDrawer.test.tsx +++ b/frontend/src/components/EditMemberDrawer/__tests__/EditMemberDrawer.test.tsx @@ -361,9 +361,9 @@ describe('EditMemberDrawer', () => { await user.click(screen.getByRole('button', { name: /delete member/i })); - expect( - await screen.findByText(/are you sure you want to delete/i), - ).toBeInTheDocument(); + await expect( + screen.findByText(/are you sure you want to delete/i), + ).resolves.toBeInTheDocument(); const confirmBtns = screen.getAllByRole('button', { name: /delete member/i }); await user.click(confirmBtns[confirmBtns.length - 1]); @@ -441,9 +441,9 @@ describe('EditMemberDrawer', () => { await user.click(screen.getByRole('button', { name: /revoke invite/i })); - expect( - await screen.findByText(/Are you sure you want to revoke the invite/i), - ).toBeInTheDocument(); + await expect( + screen.findByText(/Are you sure you want to revoke the invite/i), + ).resolves.toBeInTheDocument(); const confirmBtns = screen.getAllByRole('button', { name: /revoke invite/i }); await user.click(confirmBtns[confirmBtns.length - 1]); diff --git a/frontend/src/components/Graph/Plugin/Legend.ts b/frontend/src/components/Graph/Plugin/Legend.ts index 0dfc8728f6..d8f4f05e77 100644 --- a/frontend/src/components/Graph/Plugin/Legend.ts +++ b/frontend/src/components/Graph/Plugin/Legend.ts @@ 
-64,7 +64,7 @@ export const legend = (id: string, isLonger: boolean): Plugin => ({ // li.style.marginTop = '5px'; li.onclick = (): void => { - // @ts-ignore + // @ts-expect-error const { type } = chart.config; if (type === 'pie' || type === 'doughnut') { // Pie and doughnut charts only have a single dataset and visibility is per item diff --git a/frontend/src/components/Graph/__tests__/xAxisConfig.test.ts b/frontend/src/components/Graph/__tests__/xAxisConfig.test.ts index b26a2437c1..e872f75899 100644 --- a/frontend/src/components/Graph/__tests__/xAxisConfig.test.ts +++ b/frontend/src/components/Graph/__tests__/xAxisConfig.test.ts @@ -9,65 +9,65 @@ describe('xAxisConfig for Chart', () => { const start = dayjs(); const end = start.add(10, 'millisecond'); - expect(convertTimeRange(start.valueOf(), end.valueOf()).unitName).toEqual( - TIME_UNITS.millisecond, - ); + expect( + convertTimeRange(start.valueOf(), end.valueOf()).unitName, + ).toStrictEqual(TIME_UNITS.millisecond); } { const start = dayjs(); const end = start.add(10, 'second'); - expect(convertTimeRange(start.valueOf(), end.valueOf()).unitName).toEqual( - TIME_UNITS.second, - ); + expect( + convertTimeRange(start.valueOf(), end.valueOf()).unitName, + ).toStrictEqual(TIME_UNITS.second); } { const start = dayjs(); const end = start.add(10, 'minute'); - expect(convertTimeRange(start.valueOf(), end.valueOf()).unitName).toEqual( - TIME_UNITS.minute, - ); + expect( + convertTimeRange(start.valueOf(), end.valueOf()).unitName, + ).toStrictEqual(TIME_UNITS.minute); } { const start = dayjs(); const end = start.add(10, 'hour'); - expect(convertTimeRange(start.valueOf(), end.valueOf()).unitName).toEqual( - TIME_UNITS.hour, - ); + expect( + convertTimeRange(start.valueOf(), end.valueOf()).unitName, + ).toStrictEqual(TIME_UNITS.hour); } { const start = dayjs(); const end = start.add(10, 'day'); - expect(convertTimeRange(start.valueOf(), end.valueOf()).unitName).toEqual( - TIME_UNITS.day, - ); + expect( + 
convertTimeRange(start.valueOf(), end.valueOf()).unitName, + ).toStrictEqual(TIME_UNITS.day); } { const start = dayjs(); const end = start.add(10, 'week'); - expect(convertTimeRange(start.valueOf(), end.valueOf()).unitName).toEqual( - TIME_UNITS.week, - ); + expect( + convertTimeRange(start.valueOf(), end.valueOf()).unitName, + ).toStrictEqual(TIME_UNITS.week); } { const start = dayjs(); const end = start.add(10, 'month'); - expect(convertTimeRange(start.valueOf(), end.valueOf()).unitName).toEqual( - TIME_UNITS.month, - ); + expect( + convertTimeRange(start.valueOf(), end.valueOf()).unitName, + ).toStrictEqual(TIME_UNITS.month); } { const start = dayjs(); const end = start.add(10, 'year'); - expect(convertTimeRange(start.valueOf(), end.valueOf()).unitName).toEqual( - TIME_UNITS.year, - ); + expect( + convertTimeRange(start.valueOf(), end.valueOf()).unitName, + ).toStrictEqual(TIME_UNITS.year); } }); }); diff --git a/frontend/src/components/Graph/__tests__/yAxisConfig.test.ts b/frontend/src/components/Graph/__tests__/yAxisConfig.test.ts index 6b56ee40d4..27b1b0d5f8 100644 --- a/frontend/src/components/Graph/__tests__/yAxisConfig.test.ts +++ b/frontend/src/components/Graph/__tests__/yAxisConfig.test.ts @@ -7,7 +7,7 @@ const testFullPrecisionGetYAxisFormattedValue = ( ): string => getYAxisFormattedValue(value, format, PrecisionOptionsEnum.FULL); describe('getYAxisFormattedValue - none (full precision legacy assertions)', () => { - test('large integers and decimals', () => { + it('large integers and decimals', () => { expect(testFullPrecisionGetYAxisFormattedValue('250034', 'none')).toBe( '250034', ); @@ -22,7 +22,7 @@ describe('getYAxisFormattedValue - none (full precision legacy assertions)', () ); }); - test('preserves leading zeros after decimal until first non-zero', () => { + it('preserves leading zeros after decimal until first non-zero', () => { expect(testFullPrecisionGetYAxisFormattedValue('1.0000234', 'none')).toBe( '1.0000234', ); @@ -31,7 +31,7 @@ 
describe('getYAxisFormattedValue - none (full precision legacy assertions)', () ); }); - test('trims to three significant decimals and removes trailing zeros', () => { + it('trims to three significant decimals and removes trailing zeros', () => { expect( testFullPrecisionGetYAxisFormattedValue('0.000000250034', 'none'), ).toBe('0.000000250034'); @@ -55,7 +55,7 @@ describe('getYAxisFormattedValue - none (full precision legacy assertions)', () ).toBe('0.00000025'); }); - test('whole numbers normalize', () => { + it('whole numbers normalize', () => { expect(testFullPrecisionGetYAxisFormattedValue('1000', 'none')).toBe('1000'); expect(testFullPrecisionGetYAxisFormattedValue('99.5458', 'none')).toBe( '99.5458', @@ -68,7 +68,7 @@ describe('getYAxisFormattedValue - none (full precision legacy assertions)', () ); }); - test('strip redundant decimal zeros', () => { + it('strip redundant decimal zeros', () => { expect(testFullPrecisionGetYAxisFormattedValue('1000.000', 'none')).toBe( '1000', ); @@ -78,7 +78,7 @@ describe('getYAxisFormattedValue - none (full precision legacy assertions)', () expect(testFullPrecisionGetYAxisFormattedValue('1.000', 'none')).toBe('1'); }); - test('edge values', () => { + it('edge values', () => { expect(testFullPrecisionGetYAxisFormattedValue('0', 'none')).toBe('0'); expect(testFullPrecisionGetYAxisFormattedValue('-0', 'none')).toBe('0'); expect(testFullPrecisionGetYAxisFormattedValue('Infinity', 'none')).toBe('∞'); @@ -92,7 +92,7 @@ describe('getYAxisFormattedValue - none (full precision legacy assertions)', () expect(testFullPrecisionGetYAxisFormattedValue('abc123', 'none')).toBe('NaN'); }); - test('small decimals keep precision as-is', () => { + it('small decimals keep precision as-is', () => { expect(testFullPrecisionGetYAxisFormattedValue('0.0001', 'none')).toBe( '0.0001', ); @@ -104,7 +104,7 @@ describe('getYAxisFormattedValue - none (full precision legacy assertions)', () ); }); - test('simple decimals preserved', () => { + it('simple 
decimals preserved', () => { expect(testFullPrecisionGetYAxisFormattedValue('0.1', 'none')).toBe('0.1'); expect(testFullPrecisionGetYAxisFormattedValue('0.2', 'none')).toBe('0.2'); expect(testFullPrecisionGetYAxisFormattedValue('0.3', 'none')).toBe('0.3'); @@ -115,7 +115,7 @@ describe('getYAxisFormattedValue - none (full precision legacy assertions)', () }); describe('getYAxisFormattedValue - units (full precision legacy assertions)', () => { - test('ms', () => { + it('ms', () => { expect(testFullPrecisionGetYAxisFormattedValue('1500', 'ms')).toBe('1.5 s'); expect(testFullPrecisionGetYAxisFormattedValue('500', 'ms')).toBe('500 ms'); expect(testFullPrecisionGetYAxisFormattedValue('60000', 'ms')).toBe('1 min'); @@ -127,19 +127,19 @@ describe('getYAxisFormattedValue - units (full precision legacy assertions)', () ); }); - test('s', () => { + it('s', () => { expect(testFullPrecisionGetYAxisFormattedValue('90', 's')).toBe('1.5 mins'); expect(testFullPrecisionGetYAxisFormattedValue('30', 's')).toBe('30 s'); expect(testFullPrecisionGetYAxisFormattedValue('3600', 's')).toBe('1 hour'); }); - test('m', () => { + it('m', () => { expect(testFullPrecisionGetYAxisFormattedValue('90', 'm')).toBe('1.5 hours'); expect(testFullPrecisionGetYAxisFormattedValue('30', 'm')).toBe('30 min'); expect(testFullPrecisionGetYAxisFormattedValue('1440', 'm')).toBe('1 day'); }); - test('bytes', () => { + it('bytes', () => { expect(testFullPrecisionGetYAxisFormattedValue('1024', 'bytes')).toBe( '1 KiB', ); @@ -149,7 +149,7 @@ describe('getYAxisFormattedValue - units (full precision legacy assertions)', () ); }); - test('mbytes', () => { + it('mbytes', () => { expect(testFullPrecisionGetYAxisFormattedValue('1024', 'mbytes')).toBe( '1 GiB', ); @@ -161,7 +161,7 @@ describe('getYAxisFormattedValue - units (full precision legacy assertions)', () ); }); - test('kbytes', () => { + it('kbytes', () => { expect(testFullPrecisionGetYAxisFormattedValue('1024', 'kbytes')).toBe( '1 MiB', ); @@ -173,7 +173,7 @@ 
describe('getYAxisFormattedValue - units (full precision legacy assertions)', () ); }); - test('short', () => { + it('short', () => { expect(testFullPrecisionGetYAxisFormattedValue('1000', 'short')).toBe('1 K'); expect(testFullPrecisionGetYAxisFormattedValue('1500', 'short')).toBe( '1.5 K', @@ -201,7 +201,7 @@ describe('getYAxisFormattedValue - units (full precision legacy assertions)', () ); }); - test('percent', () => { + it('percent', () => { expect(testFullPrecisionGetYAxisFormattedValue('0.15', 'percent')).toBe( '0.15%', ); @@ -235,7 +235,7 @@ describe('getYAxisFormattedValue - units (full precision legacy assertions)', () ).toBe('1.005555555595959%'); }); - test('ratio', () => { + it('ratio', () => { expect(testFullPrecisionGetYAxisFormattedValue('0.5', 'ratio')).toBe( '0.5 ratio', ); @@ -247,7 +247,7 @@ describe('getYAxisFormattedValue - units (full precision legacy assertions)', () ); }); - test('temperature units', () => { + it('temperature units', () => { expect(testFullPrecisionGetYAxisFormattedValue('25', 'celsius')).toBe( '25 °C', ); @@ -267,13 +267,13 @@ describe('getYAxisFormattedValue - units (full precision legacy assertions)', () ); }); - test('ms edge cases', () => { + it('ms edge cases', () => { expect(testFullPrecisionGetYAxisFormattedValue('0', 'ms')).toBe('0 ms'); expect(testFullPrecisionGetYAxisFormattedValue('-1500', 'ms')).toBe('-1.5 s'); expect(testFullPrecisionGetYAxisFormattedValue('Infinity', 'ms')).toBe('∞'); }); - test('bytes edge cases', () => { + it('bytes edge cases', () => { expect(testFullPrecisionGetYAxisFormattedValue('0', 'bytes')).toBe('0 B'); expect(testFullPrecisionGetYAxisFormattedValue('-1024', 'bytes')).toBe( '-1 KiB', @@ -282,7 +282,7 @@ describe('getYAxisFormattedValue - units (full precision legacy assertions)', () }); describe('getYAxisFormattedValue - precision option tests', () => { - test('precision 0 drops decimal part', () => { + it('precision 0 drops decimal part', () => { 
expect(getYAxisFormattedValue('1.2345', 'none', 0)).toBe('1'); expect(getYAxisFormattedValue('0.9999', 'none', 0)).toBe('0'); expect(getYAxisFormattedValue('12345.6789', 'none', 0)).toBe('12345'); @@ -294,7 +294,7 @@ describe('getYAxisFormattedValue - precision option tests', () => { // with unit expect(getYAxisFormattedValue('4353.81', 'ms', 0)).toBe('4 s'); }); - test('precision 1,2,3,4 decimals', () => { + it('precision 1,2,3,4 decimals', () => { expect(getYAxisFormattedValue('1.2345', 'none', 1)).toBe('1.2'); expect(getYAxisFormattedValue('1.2345', 'none', 2)).toBe('1.23'); expect(getYAxisFormattedValue('1.2345', 'none', 3)).toBe('1.234'); @@ -345,7 +345,7 @@ describe('getYAxisFormattedValue - precision option tests', () => { expect(getYAxisFormattedValue('0.123456', 'percent', 4)).toBe('0.1235%'); // approximation }); - test('precision full uses up to DEFAULT_SIGNIFICANT_DIGITS significant digits', () => { + it('precision full uses up to DEFAULT_SIGNIFICANT_DIGITS significant digits', () => { expect( getYAxisFormattedValue( '0.00002625429914148441', diff --git a/frontend/src/components/InviteMembersModal/__tests__/InviteMembersModal.test.tsx b/frontend/src/components/InviteMembersModal/__tests__/InviteMembersModal.test.tsx index 558b1d9151..f8192c2a8f 100644 --- a/frontend/src/components/InviteMembersModal/__tests__/InviteMembersModal.test.tsx +++ b/frontend/src/components/InviteMembersModal/__tests__/InviteMembersModal.test.tsx @@ -90,11 +90,11 @@ describe('InviteMembersModal', () => { screen.getByRole('button', { name: /invite team members/i }), ); - expect( - await screen.findByText( + await expect( + screen.findByText( 'Please enter valid emails and select roles for team members', ), - ).toBeInTheDocument(); + ).resolves.toBeInTheDocument(); }); it('shows email-only message when email is invalid but role is selected', async () => { @@ -112,9 +112,9 @@ describe('InviteMembersModal', () => { screen.getByRole('button', { name: /invite team members/i }), ); - 
expect( - await screen.findByText('Please enter valid emails for team members'), - ).toBeInTheDocument(); + await expect( + screen.findByText('Please enter valid emails for team members'), + ).resolves.toBeInTheDocument(); }); it('shows role-only message when email is valid but role is missing', async () => { @@ -130,9 +130,9 @@ describe('InviteMembersModal', () => { screen.getByRole('button', { name: /invite team members/i }), ); - expect( - await screen.findByText('Please select roles for team members'), - ).toBeInTheDocument(); + await expect( + screen.findByText('Please select roles for team members'), + ).resolves.toBeInTheDocument(); }); }); diff --git a/frontend/src/components/Logs/LogStateIndicator/utils.test.ts b/frontend/src/components/Logs/LogStateIndicator/utils.test.ts index bad5147972..e2e062365e 100644 --- a/frontend/src/components/Logs/LogStateIndicator/utils.test.ts +++ b/frontend/src/components/Logs/LogStateIndicator/utils.test.ts @@ -27,7 +27,7 @@ describe('getLogIndicatorType', () => { expect(getLogIndicatorType(log)).toBe('TRACE'); }); - it('severity_text should be used when severity_number is absent ', () => { + it('severity_text should be used when severity_number is absent', () => { const log = { date: '2024-02-29T12:34:46Z', timestamp: 1646115296, diff --git a/frontend/src/components/LogsFormatOptionsMenu/LogsFormatOptionsMenu.tsx b/frontend/src/components/LogsFormatOptionsMenu/LogsFormatOptionsMenu.tsx index bac05ae4cd..72d9ce13d5 100644 --- a/frontend/src/components/LogsFormatOptionsMenu/LogsFormatOptionsMenu.tsx +++ b/frontend/src/components/LogsFormatOptionsMenu/LogsFormatOptionsMenu.tsx @@ -75,7 +75,7 @@ function OptionsMenu({ }; const handleSearchValueChange = useDebouncedFn((event): void => { - // @ts-ignore + // @ts-expect-error const value = event?.target?.value || ''; if (addColumn && addColumn?.onSearch) { diff --git a/frontend/src/components/MarkdownRenderer/MarkdownRenderer.tsx 
b/frontend/src/components/MarkdownRenderer/MarkdownRenderer.tsx index 1f8f0465b5..48f22e5575 100644 --- a/frontend/src/components/MarkdownRenderer/MarkdownRenderer.tsx +++ b/frontend/src/components/MarkdownRenderer/MarkdownRenderer.tsx @@ -50,7 +50,7 @@ function Code({ const match = /language-(\w+)/.exec(className || ''); return !inline && match ? ( Pre({ diff --git a/frontend/src/components/NewSelect/__test__/CustomMultiSelect.comprehensive.test.tsx b/frontend/src/components/NewSelect/__test__/CustomMultiSelect.comprehensive.test.tsx index a77abf2ddd..6b29585308 100644 --- a/frontend/src/components/NewSelect/__test__/CustomMultiSelect.comprehensive.test.tsx +++ b/frontend/src/components/NewSelect/__test__/CustomMultiSelect.comprehensive.test.tsx @@ -61,7 +61,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { // ===== 1. CUSTOM VALUES SUPPORT ===== describe('Custom Values Support (CS)', () => { - test('CS-01: Custom values persist in selected state', async () => { + it('CS-01: Custom values persist in selected state', async () => { const { rerender } = renderWithVirtuoso( { expect(screen.getByText('another-custom')).toBeInTheDocument(); }); - test('CS-02: Partial matches create custom values', async () => { + it('CS-02: Partial matches create custom values', async () => { renderWithVirtuoso( { expect(combobox).toBeInTheDocument(); }); - test('CS-03: Exact match filtering behavior', async () => { + it('CS-03: Exact match filtering behavior', async () => { renderWithVirtuoso( , ); @@ -176,7 +176,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { }); }); - test('CS-04: Search filtering with "end" pattern', async () => { + it('CS-04: Search filtering with "end" pattern', async () => { renderWithVirtuoso( , ); @@ -234,7 +234,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { }); }); - test('CS-05: Comma-separated values behavior', async () => { + it('CS-05: Comma-separated values behavior', async () => { renderWithVirtuoso( , ); 
@@ -281,7 +281,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { // ===== 2. SEARCH AND FILTERING ===== describe('Search and Filtering (SF)', () => { - test('SF-01: Selected values pushed to top', async () => { + it('SF-01: Selected values pushed to top', async () => { renderWithVirtuoso( { }); }); - test('SF-02: Filtering with search text', async () => { + it('SF-02: Filtering with search text', async () => { renderWithVirtuoso( , ); @@ -350,7 +350,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { }); }); - test('SF-03: Highlighting search matches', async () => { + it('SF-03: Highlighting search matches', async () => { renderWithVirtuoso( , ); @@ -381,7 +381,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { }); }); - test('SF-04: Search with no results', async () => { + it('SF-04: Search with no results', async () => { renderWithVirtuoso( , ); @@ -424,7 +424,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { // ===== 3. KEYBOARD NAVIGATION ===== describe('Keyboard Navigation (KN)', () => { - test('KN-01: Arrow key navigation in dropdown', async () => { + it('KN-01: Arrow key navigation in dropdown', async () => { renderWithVirtuoso( { }); }); - test('KN-02: Tab navigation to dropdown', async () => { + it('KN-02: Tab navigation to dropdown', async () => { renderWithVirtuoso(
@@ -515,7 +515,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { }); }); - test('KN-03: Enter selection in dropdown', async () => { + it('KN-03: Enter selection in dropdown', async () => { renderWithVirtuoso( { expect(mockOnChange).toHaveBeenCalledWith(['frontend'], ['frontend']); }); - test('KN-04: Chip deletion with keyboard', async () => { + it('KN-04: Chip deletion with keyboard', async () => { renderWithVirtuoso( { // ===== 5. UI/UX BEHAVIORS ===== describe('UI/UX Behaviors (UI)', () => { - test('UI-01: Loading state does not block interaction', async () => { + it('UI-01: Loading state does not block interaction', async () => { renderWithVirtuoso( , ); @@ -603,7 +603,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { }); }); - test('UI-02: Component remains editable in all states', async () => { + it('UI-02: Component remains editable in all states', async () => { renderWithVirtuoso( , ); @@ -634,7 +634,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { expect(combobox).not.toBeDisabled(); }); - test('UI-03: Toggle/Only labels in dropdown', async () => { + it('UI-03: Toggle/Only labels in dropdown', async () => { renderWithVirtuoso( { }); }); - test('UI-04: Should display values with loading info at bottom', async () => { + it('UI-04: Should display values with loading info at bottom', async () => { renderWithVirtuoso( , ); @@ -677,7 +677,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { }); }); - test('UI-05: Error state display in footer', async () => { + it('UI-05: Error state display in footer', async () => { renderWithVirtuoso( { }); }); - test('UI-06: No data state display', async () => { + it('UI-06: No data state display', async () => { renderWithVirtuoso( { // ===== 6. 
CLEAR ACTIONS ===== describe('Clear Actions (CA)', () => { - test('CA-01: Ctrl+A selects all chips', async () => { + it('CA-01: Ctrl+A selects all chips', async () => { renderWithVirtuoso( { }); }); - test('CA-02: Clear icon removes all selections', async () => { + it('CA-02: Clear icon removes all selections', async () => { renderWithVirtuoso( { } }); - test('CA-03: Individual chip removal', async () => { + it('CA-03: Individual chip removal', async () => { renderWithVirtuoso( { const removeButtons = document.querySelectorAll( '.ant-select-selection-item-remove', ); - expect(removeButtons.length).toBe(2); + expect(removeButtons).toHaveLength(2); await user.click(removeButtons[1] as Element); @@ -804,7 +804,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { // ===== 7. SAVE AND SELECTION TRIGGERS ===== describe('Save and Selection Triggers (ST)', () => { - test('ST-01: ESC triggers save action', async () => { + it('ST-01: ESC triggers save action', async () => { const mockDropdownChange = jest.fn(); renderWithVirtuoso( @@ -837,7 +837,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { }); }); - test('ST-02: Mouse selection works', async () => { + it('ST-02: Mouse selection works', async () => { renderWithVirtuoso( , ); @@ -859,7 +859,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { ); }); - test('ST-03: ENTER in input field creates custom value', async () => { + it('ST-03: ENTER in input field creates custom value', async () => { renderWithVirtuoso( , ); @@ -892,7 +892,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { }); }); - test('ST-04: Search text persistence', async () => { + it('ST-04: Search text persistence', async () => { renderWithVirtuoso( , ); @@ -932,7 +932,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { // ===== 8. 
SPECIAL OPTIONS AND STATES ===== describe('Special Options and States (SO)', () => { - test('SO-01: ALL option appears first and separated', async () => { + it('SO-01: ALL option appears first and separated', async () => { renderWithVirtuoso( { }); }); - test('SO-02: ALL selection behavior', async () => { + it('SO-02: ALL selection behavior', async () => { renderWithVirtuoso( { ); }); - test('SO-03: ALL tag display when all selected', () => { + it('SO-03: ALL tag display when all selected', () => { renderWithVirtuoso( { expect(screen.queryByText('frontend')).not.toBeInTheDocument(); }); - test('SO-04: Footer information display', async () => { + it('SO-04: Footer information display', async () => { renderWithVirtuoso( , ); @@ -1017,7 +1017,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { // ===== GROUPED OPTIONS SUPPORT ===== describe('Grouped Options Support', () => { - test('handles grouped options correctly', async () => { + it('handles grouped options correctly', async () => { renderWithVirtuoso( , ); @@ -1041,7 +1041,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { // ===== ACCESSIBILITY TESTS ===== describe('Accessibility', () => { - test('has proper ARIA attributes', async () => { + it('has proper ARIA attributes', async () => { renderWithVirtuoso( , ); @@ -1058,7 +1058,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { }); }); - test('supports screen reader navigation', async () => { + it('supports screen reader navigation', async () => { renderWithVirtuoso( , ); @@ -1079,7 +1079,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { // ===== 9. 
ADVANCED KEYBOARD NAVIGATION ===== describe('Advanced Keyboard Navigation (AKN)', () => { - test('AKN-01: Shift + Arrow + Del chip deletion', async () => { + it('AKN-01: Shift + Arrow + Del chip deletion', async () => { renderWithVirtuoso( { expect(combobox).toHaveFocus(); }); - test('AKN-03: Mouse out closes dropdown', async () => { + it('AKN-03: Mouse out closes dropdown', async () => { renderWithVirtuoso( , ); @@ -1164,7 +1164,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { // ===== 10. ADVANCED FILTERING AND HIGHLIGHTING ===== describe('Advanced Filtering and Highlighting (AFH)', () => { - test('AFH-01: Highlighted values pushed to top', async () => { + it('AFH-01: Highlighted values pushed to top', async () => { renderWithVirtuoso( , ); @@ -1220,7 +1220,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { }); }); - test('AFH-02: Distinction between selection Enter and save Enter', async () => { + it('AFH-02: Distinction between selection Enter and save Enter', async () => { renderWithVirtuoso( , ); @@ -1267,7 +1267,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { // ===== 11. ADVANCED CLEAR ACTIONS ===== describe('Advanced Clear Actions (ACA)', () => { - test('ACA-01: Clear action waiting behavior', async () => { + it('ACA-01: Clear action waiting behavior', async () => { const mockOnChangeWithDelay = jest.fn().mockImplementation( () => new Promise((resolve) => { @@ -1300,7 +1300,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { // ===== 12. 
ADVANCED UI STATES ===== describe('Advanced UI States (AUS)', () => { - test('AUS-01: No data with previous value selected', async () => { + it('AUS-01: No data with previous value selected', async () => { renderWithVirtuoso( { expect(screen.getByText('previous-value')).toBeInTheDocument(); }); - test('AUS-02: Always editable accessibility', async () => { + it('AUS-02: Always editable accessibility', async () => { renderWithVirtuoso( , ); @@ -1338,7 +1338,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { expect(combobox).not.toBeDisabled(); }); - test('AUS-03: Sufficient space for search value', async () => { + it('AUS-03: Sufficient space for search value', async () => { renderWithVirtuoso( , ); @@ -1372,7 +1372,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { // ===== 13. REGEX AND CUSTOM VALUES ===== describe('Regex and Custom Values (RCV)', () => { - test('RCV-01: Regex pattern support', async () => { + it('RCV-01: Regex pattern support', async () => { renderWithVirtuoso( { }); }); - test('RCV-02: Custom values treated as normal dropdown values', async () => { + it('RCV-02: Custom values treated as normal dropdown values', async () => { const customOptions = [ ...mockOptions, { label: 'custom-value', value: 'custom-value', type: 'custom' as const }, @@ -1456,7 +1456,7 @@ describe('CustomMultiSelect - Comprehensive Tests', () => { // ===== 14. 
DROPDOWN PERSISTENCE ===== describe('Dropdown Persistence (DP)', () => { - test('DP-01: Dropdown stays open for non-save actions', async () => { + it('DP-01: Dropdown stays open for non-save actions', async () => { renderWithVirtuoso( , ); diff --git a/frontend/src/components/NewSelect/__test__/CustomSelect.comprehensive.test.tsx b/frontend/src/components/NewSelect/__test__/CustomSelect.comprehensive.test.tsx index d83d2405cd..df982c8ec3 100644 --- a/frontend/src/components/NewSelect/__test__/CustomSelect.comprehensive.test.tsx +++ b/frontend/src/components/NewSelect/__test__/CustomSelect.comprehensive.test.tsx @@ -50,7 +50,7 @@ describe('CustomSelect - Comprehensive Tests', () => { // ===== 1. CUSTOM VALUES SUPPORT ===== describe('Custom Values Support (CS)', () => { - test('CS-02: Partial matches create custom values', async () => { + it('CS-02: Partial matches create custom values', async () => { render( { }); }); - test('CS-03: Exact match behavior', async () => { + it('CS-03: Exact match behavior', async () => { render(); const combobox = screen.getByRole('combobox'); @@ -161,7 +161,7 @@ describe('CustomSelect - Comprehensive Tests', () => { // ===== 2. 
SEARCH AND FILTERING ===== describe('Search and Filtering (SF)', () => { - test('SF-01: Selected values pushed to top', async () => { + it('SF-01: Selected values pushed to top', async () => { render( { }); }); - test('SF-02: Real-time search filtering', async () => { + it('SF-02: Real-time search filtering', async () => { render(); const combobox = screen.getByRole('combobox'); @@ -228,7 +228,7 @@ describe('CustomSelect - Comprehensive Tests', () => { }); }); - test('SF-03: Search highlighting', async () => { + it('SF-03: Search highlighting', async () => { render(); const combobox = screen.getByRole('combobox'); @@ -257,7 +257,7 @@ describe('CustomSelect - Comprehensive Tests', () => { }); }); - test('SF-04: Search with partial matches', async () => { + it('SF-04: Search with partial matches', async () => { render(); const combobox = screen.getByRole('combobox'); @@ -298,7 +298,7 @@ describe('CustomSelect - Comprehensive Tests', () => { // ===== 3. KEYBOARD NAVIGATION ===== describe('Keyboard Navigation (KN)', () => { - test('KN-01: Arrow key navigation in dropdown', async () => { + it('KN-01: Arrow key navigation in dropdown', async () => { render(); const combobox = screen.getByRole('combobox'); @@ -329,7 +329,7 @@ describe('CustomSelect - Comprehensive Tests', () => { }); }); - test('KN-02: Tab navigation to dropdown', async () => { + it('KN-02: Tab navigation to dropdown', async () => { render(
@@ -355,7 +355,7 @@ describe('CustomSelect - Comprehensive Tests', () => { }); }); - test('KN-03: Enter selection in dropdown', async () => { + it('KN-03: Enter selection in dropdown', async () => { render(); const combobox = screen.getByRole('combobox'); @@ -376,7 +376,7 @@ describe('CustomSelect - Comprehensive Tests', () => { }); }); - test('KN-04: Space key selection', async () => { + it('KN-04: Space key selection', async () => { render(); const combobox = screen.getByRole('combobox'); @@ -396,7 +396,7 @@ describe('CustomSelect - Comprehensive Tests', () => { }); }); - test('KN-05: Tab navigation within dropdown', async () => { + it('KN-05: Tab navigation within dropdown', async () => { render(); const combobox = screen.getByRole('combobox'); @@ -417,7 +417,7 @@ describe('CustomSelect - Comprehensive Tests', () => { // ===== 4. UI/UX BEHAVIORS ===== describe('UI/UX Behaviors (UI)', () => { - test('UI-01: Loading state does not block interaction', async () => { + it('UI-01: Loading state does not block interaction', async () => { render( , ); @@ -429,7 +429,7 @@ describe('CustomSelect - Comprehensive Tests', () => { expect(combobox).toHaveFocus(); }); - test('UI-02: Component remains editable in all states', () => { + it('UI-02: Component remains editable in all states', () => { render( { expect(combobox).not.toBeDisabled(); }); - test('UI-03: Loading state display in footer', async () => { + it('UI-03: Loading state display in footer', async () => { render( , ); @@ -458,7 +458,7 @@ describe('CustomSelect - Comprehensive Tests', () => { }); }); - test('UI-04: Error state display in footer', async () => { + it('UI-04: Error state display in footer', async () => { render( { }); }); - test('UI-05: No data state display', async () => { + it('UI-05: No data state display', async () => { render( { // ===== 6. 
SAVE AND SELECTION TRIGGERS ===== describe('Save and Selection Triggers (ST)', () => { - test('ST-01: Mouse selection works', async () => { + it('ST-01: Mouse selection works', async () => { render(); const combobox = screen.getByRole('combobox'); @@ -520,7 +520,7 @@ describe('CustomSelect - Comprehensive Tests', () => { // ===== 7. GROUPED OPTIONS SUPPORT ===== describe('Grouped Options Support', () => { - test('handles grouped options correctly', async () => { + it('handles grouped options correctly', async () => { render( , ); @@ -541,7 +541,7 @@ describe('CustomSelect - Comprehensive Tests', () => { }); }); - test('grouped option selection works', async () => { + it('grouped option selection works', async () => { render( , ); @@ -566,7 +566,7 @@ describe('CustomSelect - Comprehensive Tests', () => { // ===== 8. ACCESSIBILITY ===== describe('Accessibility', () => { - test('has proper ARIA attributes', async () => { + it('has proper ARIA attributes', async () => { render(); const combobox = screen.getByRole('combobox'); @@ -580,7 +580,7 @@ describe('CustomSelect - Comprehensive Tests', () => { }); }); - test('supports screen reader navigation', async () => { + it('supports screen reader navigation', async () => { render(); const combobox = screen.getByRole('combobox'); @@ -596,7 +596,7 @@ describe('CustomSelect - Comprehensive Tests', () => { }); }); - test('has proper focus management', async () => { + it('has proper focus management', async () => { render(); const combobox = screen.getByRole('combobox'); @@ -617,7 +617,7 @@ describe('CustomSelect - Comprehensive Tests', () => { // ===== 10. 
EDGE CASES ===== describe('Edge Cases', () => { - test('handles special characters in options', async () => { + it('handles special characters in options', async () => { const specialOptions = [ { label: 'Option with spaces', value: 'option-with-spaces' }, { label: 'Option-with-dashes', value: 'option-with-dashes' }, @@ -638,7 +638,7 @@ describe('CustomSelect - Comprehensive Tests', () => { }); }); - test('handles extremely long option labels', async () => { + it('handles extremely long option labels', async () => { const longLabelOptions = [ { label: @@ -663,7 +663,7 @@ describe('CustomSelect - Comprehensive Tests', () => { // ===== 11. ADVANCED KEYBOARD NAVIGATION ===== describe('Advanced Keyboard Navigation (AKN)', () => { - test('AKN-01: Mouse out closes dropdown', async () => { + it('AKN-01: Mouse out closes dropdown', async () => { render(); const combobox = screen.getByRole('combobox'); @@ -684,7 +684,7 @@ describe('CustomSelect - Comprehensive Tests', () => { }); }); - test('AKN-02: TAB navigation from input to dropdown', async () => { + it('AKN-02: TAB navigation from input to dropdown', async () => { render(
@@ -722,7 +722,7 @@ describe('CustomSelect - Comprehensive Tests', () => { // ===== 12. ADVANCED FILTERING AND HIGHLIGHTING ===== describe('Advanced Filtering and Highlighting (AFH)', () => { - test('AFH-01: Highlighted values pushed to top', async () => { + it('AFH-01: Highlighted values pushed to top', async () => { render(); const combobox = screen.getByRole('combobox'); @@ -776,7 +776,7 @@ describe('CustomSelect - Comprehensive Tests', () => { }); }); - test('AFH-02: Distinction between selection Enter and save Enter', async () => { + it('AFH-02: Distinction between selection Enter and save Enter', async () => { render(); const combobox = screen.getByRole('combobox'); @@ -830,7 +830,7 @@ describe('CustomSelect - Comprehensive Tests', () => { // ===== 13. ADVANCED CLEAR ACTIONS ===== describe('Advanced Clear Actions (ACA)', () => { - test('ACA-01: Clear action waiting behavior', async () => { + it('ACA-01: Clear action waiting behavior', async () => { const mockOnChangeWithDelay = jest.fn().mockImplementation( () => new Promise((resolve) => { @@ -860,7 +860,7 @@ describe('CustomSelect - Comprehensive Tests', () => { expect(mockOnChangeWithDelay).toHaveBeenCalled(); }); - test('ACA-02: Single select clear behavior like text input', async () => { + it('ACA-02: Single select clear behavior like text input', async () => { render( { // ===== 14. 
ADVANCED UI STATES ===== describe('Advanced UI States (AUS)', () => { - test('AUS-01: No data with previous value selected', async () => { + it('AUS-01: No data with previous value selected', async () => { render( { expect(screen.getAllByText('previous-value')).toHaveLength(2); }); - test('AUS-02: Always editable accessibility', async () => { + it('AUS-02: Always editable accessibility', async () => { render( , ); @@ -921,7 +921,7 @@ describe('CustomSelect - Comprehensive Tests', () => { expect(combobox).not.toBeDisabled(); }); - test('AUS-03: Sufficient space for search value', async () => { + it('AUS-03: Sufficient space for search value', async () => { render(); const combobox = screen.getByRole('combobox'); @@ -950,7 +950,7 @@ describe('CustomSelect - Comprehensive Tests', () => { }); }); - test('AUS-04: No spinners blocking user interaction', async () => { + it('AUS-04: No spinners blocking user interaction', async () => { render( , ); @@ -976,7 +976,7 @@ describe('CustomSelect - Comprehensive Tests', () => { // ===== 15. REGEX AND CUSTOM VALUES ===== describe('Regex and Custom Values (RCV)', () => { - test('RCV-01: Regex pattern support', async () => { + it('RCV-01: Regex pattern support', async () => { render(); const combobox = screen.getByRole('combobox'); @@ -1019,7 +1019,7 @@ describe('CustomSelect - Comprehensive Tests', () => { }); }); - test('RCV-02: Custom values treated as normal dropdown values', async () => { + it('RCV-02: Custom values treated as normal dropdown values', async () => { const customOptions = [ ...mockOptions, { label: 'custom-value', value: 'custom-value', type: 'custom' as const }, @@ -1051,7 +1051,7 @@ describe('CustomSelect - Comprehensive Tests', () => { // ===== 16. 
DROPDOWN PERSISTENCE ===== describe('Dropdown Persistence (DP)', () => { - test('DP-01: Dropdown closes only on save actions', async () => { + it('DP-01: Dropdown closes only on save actions', async () => { render(); const combobox = screen.getByRole('combobox'); diff --git a/frontend/src/components/NewSelect/__test__/VariableItem.integration.test.tsx b/frontend/src/components/NewSelect/__test__/VariableItem.integration.test.tsx index c57d0b8b1d..c1f4b5ea1a 100644 --- a/frontend/src/components/NewSelect/__test__/VariableItem.integration.test.tsx +++ b/frontend/src/components/NewSelect/__test__/VariableItem.integration.test.tsx @@ -86,7 +86,7 @@ describe('VariableItem Integration Tests', () => { // ===== 1. INTEGRATION WITH CUSTOMSELECT ===== describe('CustomSelect Integration (VI)', () => { - test('VI-01: Single select variable integration', async () => { + it('VI-01: Single select variable integration', async () => { const variable = createMockVariable({ multiSelect: false, type: 'CUSTOM', @@ -130,7 +130,7 @@ describe('VariableItem Integration Tests', () => { // ===== 2. INTEGRATION WITH CUSTOMMULTISELECT ===== describe('CustomMultiSelect Integration (VI)', () => { - test('VI-02: Multi select variable integration', async () => { + it('VI-02: Multi select variable integration', async () => { const variable = createMockVariable({ multiSelect: true, type: 'CUSTOM', @@ -174,7 +174,7 @@ describe('VariableItem Integration Tests', () => { // ===== 3. TEXTBOX VARIABLE TYPE ===== describe('Textbox Variable Integration', () => { - test('VI-03: Textbox variable handling', async () => { + it('VI-03: Textbox variable handling', async () => { const variable = createMockVariable({ type: 'TEXTBOX', selectedValue: 'initial-value', @@ -219,7 +219,7 @@ describe('VariableItem Integration Tests', () => { // ===== 4. 
VALUE PERSISTENCE AND STATE MANAGEMENT ===== describe('Value Persistence and State Management', () => { - test('VI-04: All selected state handling', () => { + it('VI-04: All selected state handling', () => { const variable = createMockVariable({ multiSelect: true, type: 'CUSTOM', @@ -243,7 +243,7 @@ describe('VariableItem Integration Tests', () => { expect(screen.getByText('ALL')).toBeInTheDocument(); }); - test('VI-05: Dropdown behavior with temporary selections', async () => { + it('VI-05: Dropdown behavior with temporary selections', async () => { const variable = createMockVariable({ multiSelect: true, type: 'CUSTOM', @@ -277,7 +277,7 @@ describe('VariableItem Integration Tests', () => { // ===== 6. ACCESSIBILITY AND USER EXPERIENCE ===== describe('Accessibility and User Experience', () => { - test('VI-06: Variable description tooltip', async () => { + it('VI-06: Variable description tooltip', async () => { const variable = createMockVariable({ description: 'This variable controls the service selection', type: 'CUSTOM', @@ -310,7 +310,7 @@ describe('VariableItem Integration Tests', () => { }); }); - test('VI-07: Variable name display', () => { + it('VI-07: Variable name display', () => { const variable = createMockVariable({ name: 'service_name', type: 'CUSTOM', @@ -331,7 +331,7 @@ describe('VariableItem Integration Tests', () => { expect(screen.getByText('$service_name')).toBeInTheDocument(); }); - test('VI-08: Max tag count behavior', async () => { + it('VI-08: Max tag count behavior', async () => { const variable = createMockVariable({ multiSelect: true, type: 'CUSTOM', @@ -365,7 +365,7 @@ describe('VariableItem Integration Tests', () => { // ===== 8. 
SEARCH INTERACTION TESTS ===== describe('Search Interaction Tests', () => { - test('VI-14: Search persistence across dropdown open/close', async () => { + it('VI-14: Search persistence across dropdown open/close', async () => { const variable = createMockVariable({ type: 'CUSTOM', customValue: 'option1,option2,option3', @@ -417,7 +417,7 @@ describe('VariableItem Integration Tests', () => { // ===== 9. ADVANCED KEYBOARD NAVIGATION ===== describe('Advanced Keyboard Navigation (VI)', () => { - test('VI-15: Shift + Arrow + Del chip deletion in multiselect', async () => { + it('VI-15: Shift + Arrow + Del chip deletion in multiselect', async () => { const variable = createMockVariable({ type: 'CUSTOM', customValue: 'option1,option2,option3', @@ -461,7 +461,7 @@ describe('VariableItem Integration Tests', () => { // ===== 11. ADVANCED UI STATES ===== describe('Advanced UI States (VI)', () => { - test('VI-19: No data with previous value selected in variable', async () => { + it('VI-19: No data with previous value selected in variable', async () => { const variable = createMockVariable({ type: 'CUSTOM', customValue: '', @@ -499,7 +499,7 @@ describe('VariableItem Integration Tests', () => { expect(combobox).toBeInTheDocument(); }); - test('VI-20: Always editable accessibility in variable', async () => { + it('VI-20: Always editable accessibility in variable', async () => { const variable = createMockVariable({ type: 'CUSTOM', customValue: 'option1,option2', @@ -530,7 +530,7 @@ describe('VariableItem Integration Tests', () => { // ===== 13. 
DROPDOWN PERSISTENCE ===== describe('Dropdown Persistence (VI)', () => { - test('VI-24: Dropdown stays open for non-save actions in variable', async () => { + it('VI-24: Dropdown stays open for non-save actions in variable', async () => { const variable = createMockVariable({ type: 'CUSTOM', customValue: 'option1,option2,option3', diff --git a/frontend/src/components/OverflowInputToolTip/OverflowInputToolTip.test.tsx b/frontend/src/components/OverflowInputToolTip/OverflowInputToolTip.test.tsx index 334b74fc9a..2ff39dc13b 100644 --- a/frontend/src/components/OverflowInputToolTip/OverflowInputToolTip.test.tsx +++ b/frontend/src/components/OverflowInputToolTip/OverflowInputToolTip.test.tsx @@ -44,7 +44,7 @@ describe('OverflowInputToolTip', () => { jest.restoreAllMocks(); }); - test('shows tooltip when content overflows and input is clamped at maxAutoWidth', async () => { + it('shows tooltip when content overflows and input is clamped at maxAutoWidth', async () => { mockOverflow(150, 250); // clientWidth >= maxAutoWidth (150), scrollWidth > clientWidth render(); @@ -64,7 +64,7 @@ describe('OverflowInputToolTip', () => { ).toBeInTheDocument(); }); - test('does NOT show tooltip when content does not overflow', async () => { + it('does NOT show tooltip when content does not overflow', async () => { mockOverflow(150, 100); // content fits (scrollWidth <= clientWidth) render(); @@ -76,7 +76,7 @@ describe('OverflowInputToolTip', () => { }); }); - test('does NOT show tooltip when content overflows but input is NOT at maxAutoWidth', async () => { + it('does NOT show tooltip when content overflows but input is NOT at maxAutoWidth', async () => { mockOverflow(100, 250); // clientWidth < maxAutoWidth (150), scrollWidth > clientWidth render(); @@ -88,7 +88,7 @@ describe('OverflowInputToolTip', () => { }); }); - test('uncontrolled input allows typing', async () => { + it('uncontrolled input allows typing', async () => { render(); const input = screen.getByRole('textbox') as 
HTMLInputElement; @@ -97,7 +97,7 @@ describe('OverflowInputToolTip', () => { expect(input).toHaveValue('InitABC'); }); - test('disabled input never shows tooltip even if overflowing', async () => { + it('disabled input never shows tooltip even if overflowing', async () => { mockOverflow(150, 300); render(); @@ -109,7 +109,7 @@ describe('OverflowInputToolTip', () => { }); }); - test('renders mirror span and input correctly (structural assertions instead of snapshot)', () => { + it('renders mirror span and input correctly (structural assertions instead of snapshot)', () => { const { container } = render(); const mirror = container.querySelector('.overflow-input-mirror'); const input = container.querySelector('input') as HTMLInputElement | null; diff --git a/frontend/src/components/QueryBuilderV2/QueryV2/TraceOperator/__tests__/traceOperatorContextUtils.test.ts b/frontend/src/components/QueryBuilderV2/QueryV2/TraceOperator/__tests__/traceOperatorContextUtils.test.ts index 28e1cb4f95..28bd69c23c 100644 --- a/frontend/src/components/QueryBuilderV2/QueryV2/TraceOperator/__tests__/traceOperatorContextUtils.test.ts +++ b/frontend/src/components/QueryBuilderV2/QueryV2/TraceOperator/__tests__/traceOperatorContextUtils.test.ts @@ -29,7 +29,7 @@ describe('traceOperatorContextUtils', () => { null, ); - expect(context).toEqual({ + expect(context).toStrictEqual({ tokenType: TraceOperatorGrammarLexer.IDENTIFIER, text: 'test', start: 0, @@ -62,7 +62,7 @@ describe('traceOperatorContextUtils', () => { false, ); - expect(context).toEqual({ + expect(context).toStrictEqual({ tokenType: TraceOperatorGrammarLexer.IDENTIFIER, text: 'test', start: 0, @@ -193,7 +193,7 @@ describe('traceOperatorContextUtils', () => { it('should return default context for empty query', () => { const result = getTraceOperatorContextAtCursor('', 0); - expect(result).toEqual({ + expect(result).toStrictEqual({ tokenType: -1, text: '', start: 0, @@ -211,7 +211,7 @@ describe('traceOperatorContextUtils', () => { 
it('should return default context for null query', () => { const result = getTraceOperatorContextAtCursor(null as any, 0); - expect(result).toEqual({ + expect(result).toStrictEqual({ tokenType: -1, text: '', start: 0, @@ -229,7 +229,7 @@ describe('traceOperatorContextUtils', () => { it('should return default context for undefined query', () => { const result = getTraceOperatorContextAtCursor(undefined as any, 0); - expect(result).toEqual({ + expect(result).toStrictEqual({ tokenType: -1, text: '', start: 0, diff --git a/frontend/src/components/QueryBuilderV2/QueryV2/TraceOperator/__tests__/utils.test.ts b/frontend/src/components/QueryBuilderV2/QueryV2/TraceOperator/__tests__/utils.test.ts index 88443842ce..148cac59c6 100644 --- a/frontend/src/components/QueryBuilderV2/QueryV2/TraceOperator/__tests__/utils.test.ts +++ b/frontend/src/components/QueryBuilderV2/QueryV2/TraceOperator/__tests__/utils.test.ts @@ -8,21 +8,21 @@ const makeTraceOperator = (expression: string): IBuilderTraceOperator => describe('getInvolvedQueriesInTraceOperator', () => { it('returns empty array for empty input', () => { const result = getInvolvedQueriesInTraceOperator([]); - expect(result).toEqual([]); + expect(result).toStrictEqual([]); }); it('extracts identifiers from expression', () => { const result = getInvolvedQueriesInTraceOperator([ makeTraceOperator('A => B'), ]); - expect(result).toEqual(['A', 'B']); + expect(result).toStrictEqual(['A', 'B']); }); it('extracts identifiers from complex expression', () => { const result = getInvolvedQueriesInTraceOperator([ makeTraceOperator('A => (NOT B || C)'), ]); - expect(result).toEqual(['A', 'B', 'C']); + expect(result).toStrictEqual(['A', 'B', 'C']); }); it('filters out querynames from complex expression', () => { @@ -31,7 +31,7 @@ describe('getInvolvedQueriesInTraceOperator', () => { '(A1 && (NOT B2 || (C3 -> (D4 && E5)))) => ((F6 || G7) && (NOT (H8 -> I9)))', ), ]); - expect(result).toEqual([ + expect(result).toStrictEqual([ 'A1', 'B2', 
'C3', diff --git a/frontend/src/components/QueryBuilderV2/__tests__/previousQuery.utils.test.ts b/frontend/src/components/QueryBuilderV2/__tests__/previousQuery.utils.test.ts index 030ed067e4..098fe1bb6c 100644 --- a/frontend/src/components/QueryBuilderV2/__tests__/previousQuery.utils.test.ts +++ b/frontend/src/components/QueryBuilderV2/__tests__/previousQuery.utils.test.ts @@ -85,7 +85,7 @@ describe('previousQuery.utils', () => { saveAsPreviousQuery(key, sampleQuery); const fromStore = getPreviousQueryFromKey(key); - expect(fromStore).toEqual(sampleQuery); + expect(fromStore).toStrictEqual(sampleQuery); }); it('saveAsPreviousQuery merges multiple entries and removeKeyFromPreviousQuery deletes one', () => { diff --git a/frontend/src/components/QueryBuilderV2/__tests__/utils.test.ts b/frontend/src/components/QueryBuilderV2/__tests__/utils.test.ts index feff0667f9..0d77c05aeb 100644 --- a/frontend/src/components/QueryBuilderV2/__tests__/utils.test.ts +++ b/frontend/src/components/QueryBuilderV2/__tests__/utils.test.ts @@ -22,18 +22,20 @@ describe('convertFiltersToExpression', () => { it('should handle empty, null, and undefined inputs', () => { // Test null and undefined - expect(convertFiltersToExpression(null as any)).toEqual({ expression: '' }); - expect(convertFiltersToExpression(undefined as any)).toEqual({ + expect(convertFiltersToExpression(null as any)).toStrictEqual({ + expression: '', + }); + expect(convertFiltersToExpression(undefined as any)).toStrictEqual({ expression: '', }); // Test empty filters - expect(convertFiltersToExpression({ items: [], op: 'AND' })).toEqual({ + expect(convertFiltersToExpression({ items: [], op: 'AND' })).toStrictEqual({ expression: '', }); expect( convertFiltersToExpression({ items: undefined, op: 'AND' } as any), - ).toEqual({ expression: '' }); + ).toStrictEqual({ expression: '' }); }); it('should convert basic comparison operators with proper value formatting', () => { @@ -92,7 +94,7 @@ 
describe('convertFiltersToExpression', () => { }; const result = convertFiltersToExpression(filters); - expect(result).toEqual({ + expect(result).toStrictEqual({ expression: "service = 'api-gateway' AND status != 'error' AND duration > 100 AND count <= 50 AND is_active = true AND enabled = false AND count = 0 AND regex REGEXP '.*'", }); @@ -124,7 +126,7 @@ describe('convertFiltersToExpression', () => { }; const result = convertFiltersToExpression(filters); - expect(result).toEqual({ + expect(result).toStrictEqual({ expression: "message = 'user\\'s data' AND description = '' AND path = '/api/v1/users'", }); @@ -162,7 +164,7 @@ describe('convertFiltersToExpression', () => { }; const result = convertFiltersToExpression(filters); - expect(result).toEqual({ + expect(result).toStrictEqual({ expression: "service in ['api-gateway', 'user-service', 'auth-service'] AND status in ['success'] AND tags in [] AND name in ['John\\'s', 'Mary\\'s', 'Bob']", }); @@ -224,7 +226,7 @@ describe('convertFiltersToExpression', () => { }; const result = convertFiltersToExpression(filters); - expect(result).toEqual({ + expect(result).toStrictEqual({ expression: "service NOT IN ['api-gateway', 'user-service'] AND message NOT LIKE 'error' AND path NOT REGEXP '/api/.*' AND service NOT IN ['api-gateway'] AND user_id NOT EXISTS AND description NOT CONTAINS 'error' AND NOT has(tags, 'production') AND NOT hasAny(labels, ['env:prod', 'service:api'])", }); @@ -268,7 +270,7 @@ describe('convertFiltersToExpression', () => { }; const result = convertFiltersToExpression(filters); - expect(result).toEqual({ + expect(result).toStrictEqual({ expression: "user_id exists AND user_id exists AND has(tags, 'production') AND hasAny(tags, ['production', 'staging']) AND hasAll(tags, ['production', 'monitoring'])", }); @@ -312,7 +314,7 @@ describe('convertFiltersToExpression', () => { }; const result = convertFiltersToExpression(filters); - expect(result).toEqual({ + expect(result).toStrictEqual({ expression: 
"service = 'api-gateway' AND status = 'success' AND service in ['api-gateway']", }); @@ -362,7 +364,7 @@ describe('convertFiltersToExpression', () => { }; const result = convertFiltersToExpression(filters); - expect(result).toEqual({ + expect(result).toStrictEqual({ expression: "service in ['api-gateway', 'user-service'] AND user_id exists AND has(tags, 'production') AND duration > 100 AND status NOT IN ['error', 'timeout'] AND method = 'POST'", }); @@ -412,7 +414,7 @@ describe('convertFiltersToExpression', () => { }; const result = convertFiltersToExpression(filters); - expect(result).toEqual({ + expect(result).toStrictEqual({ expression: "count = 0 AND score > 100 AND limit >= 50 AND threshold < 1000 AND max_value <= 999 AND values in ['1', '2', '3', '4', '5']", }); @@ -456,7 +458,7 @@ describe('convertFiltersToExpression', () => { }; const result = convertFiltersToExpression(filters); - expect(result).toEqual({ + expect(result).toStrictEqual({ expression: "is_active = true AND is_deleted = false AND email = 'user@example.com' AND description = 'Contains \"quotes\" and \\'apostrophes\\'' AND path = '/api/v1/users/123?filter=true'", }); @@ -506,7 +508,7 @@ describe('convertFiltersToExpression', () => { }; const result = convertFiltersToExpression(filters); - expect(result).toEqual({ + expect(result).toStrictEqual({ expression: "has(tags, 'production') AND hasAny(labels, ['env:prod', 'service:api']) AND hasAll(metadata, ['version:1.0', 'team:backend']) AND services in ['api-gateway', 'user-service', 'auth-service', 'payment-service'] AND excluded_services NOT IN ['legacy-service', 'deprecated-service'] AND status_codes in ['200', '201', '400', '500']", }); @@ -544,7 +546,7 @@ describe('convertFiltersToExpression', () => { }; const result = convertFiltersToExpression(filters); - expect(result).toEqual({ + expect(result).toStrictEqual({ expression: "user_id NOT EXISTS AND description NOT CONTAINS 'error' AND NOT has(tags, 'production') AND NOT hasAny(labels, 
['env:prod', 'service:api'])", }); @@ -568,7 +570,7 @@ describe('convertFiltersToExpression', () => { undefined, ); - expect(result.filters).toEqual(filters); + expect(result.filters).toStrictEqual(filters); expect(result.filter.expression).toBe("service.name = 'test-service'"); }); @@ -583,7 +585,7 @@ describe('convertFiltersToExpression', () => { undefined, ); - expect(result.filters).toEqual(filters); + expect(result.filters).toStrictEqual(filters); expect(result.filter.expression).toBe(''); }); @@ -611,7 +613,7 @@ describe('convertFiltersToExpression', () => { expect(result.filter).toBeDefined(); expect(result.filter.expression).toBe("service.name = 'updated-service'"); // Ensure parser can parse the existing query - expect(extractQueryPairs(existingQuery)).toEqual( + expect(extractQueryPairs(existingQuery)).toStrictEqual( expect.arrayContaining([ expect.objectContaining({ key: 'service.name', @@ -805,7 +807,7 @@ describe('convertAggregationToExpression', () => { temporality: 'delta', }); - expect(result).toEqual([ + expect(result).toStrictEqual([ { metricName: 'test_metric', timeAggregation: 'avg', @@ -825,9 +827,11 @@ describe('convertAggregationToExpression', () => { spaceAggregation: 'noop', }); - expect(result).toEqual([ + expect(result).toStrictEqual([ { metricName: 'test_metric', + reduceTo: undefined, + temporality: undefined, timeAggregation: 'count', spaceAggregation: 'count', }, @@ -841,9 +845,11 @@ describe('convertAggregationToExpression', () => { dataSource: DataSource.METRICS, }); - expect(result).toEqual([ + expect(result).toStrictEqual([ { metricName: '', + reduceTo: undefined, + temporality: undefined, timeAggregation: 'sum', spaceAggregation: 'sum', }, @@ -858,7 +864,7 @@ describe('convertAggregationToExpression', () => { alias: 'trace_alias', }); - expect(result).toEqual([ + expect(result).toStrictEqual([ { expression: 'count(test_metric)', alias: 'trace_alias', @@ -874,7 +880,7 @@ describe('convertAggregationToExpression', () => { alias: 
'log_alias', }); - expect(result).toEqual([ + expect(result).toStrictEqual([ { expression: 'avg(test_metric)', alias: 'log_alias', @@ -889,7 +895,7 @@ describe('convertAggregationToExpression', () => { dataSource: DataSource.TRACES, }); - expect(result).toEqual([ + expect(result).toStrictEqual([ { expression: 'count()', }, @@ -903,7 +909,7 @@ describe('convertAggregationToExpression', () => { dataSource: DataSource.LOGS, }); - expect(result).toEqual([ + expect(result).toStrictEqual([ { expression: 'sum(test_metric)', }, @@ -917,9 +923,11 @@ describe('convertAggregationToExpression', () => { dataSource: DataSource.METRICS, }); - expect(result).toEqual([ + expect(result).toStrictEqual([ { metricName: 'test_metric', + reduceTo: undefined, + temporality: undefined, timeAggregation: 'max', spaceAggregation: 'max', }, @@ -933,7 +941,7 @@ describe('convertAggregationToExpression', () => { dataSource: DataSource.METRICS, }); - expect(result).toEqual([ + expect(result).toStrictEqual([ { metricName: 'test_metric', timeAggregation: 'sum', @@ -951,7 +959,7 @@ describe('convertAggregationToExpression', () => { dataSource: DataSource.TRACES, }); - expect(result).toEqual([ + expect(result).toStrictEqual([ { expression: 'count()', }, @@ -965,7 +973,7 @@ describe('convertAggregationToExpression', () => { dataSource: DataSource.LOGS, }); - expect(result).toEqual([ + expect(result).toStrictEqual([ { expression: 'count()', }, diff --git a/frontend/src/components/QuickFilters/FilterRenderers/Checkbox/Checkbox.test.tsx b/frontend/src/components/QuickFilters/FilterRenderers/Checkbox/Checkbox.test.tsx index 928b9bd7dd..3e686d54da 100644 --- a/frontend/src/components/QuickFilters/FilterRenderers/Checkbox/Checkbox.test.tsx +++ b/frontend/src/components/QuickFilters/FilterRenderers/Checkbox/Checkbox.test.tsx @@ -471,6 +471,6 @@ describe('CheckboxFilter - User Flows', () => { expect(filterForServiceName.key.key).toBe(SERVICE_NAME_KEY); expect(filterForServiceName.op).toBe('in'); - 
expect(filterForServiceName.value).toEqual(['mq-kafka', 'otel-demo']); + expect(filterForServiceName.value).toStrictEqual(['mq-kafka', 'otel-demo']); }); }); diff --git a/frontend/src/components/QuickFilters/tests/QuickFilters.test.tsx b/frontend/src/components/QuickFilters/tests/QuickFilters.test.tsx index 7206ac506f..413d6d8e11 100644 --- a/frontend/src/components/QuickFilters/tests/QuickFilters.test.tsx +++ b/frontend/src/components/QuickFilters/tests/QuickFilters.test.tsx @@ -323,7 +323,9 @@ describe('Quick Filters with custom filters', () => { const settingsButton = icon.closest('button') ?? icon; await user.click(settingsButton); - expect(await screen.findByText('Edit quick filters')).toBeInTheDocument(); + await expect( + screen.findByText('Edit quick filters'), + ).resolves.toBeInTheDocument(); const addedSection = screen.getByText(ADDED_FILTERS_LABEL).parentElement!; expect(addedSection).toContainElement( @@ -454,7 +456,7 @@ describe('Quick Filters with custom filters', () => { }); const requestBody = putHandler.mock.calls[0][0]; - expect(requestBody.filters).toEqual( + expect(requestBody.filters).toStrictEqual( expect.arrayContaining([ expect.not.objectContaining({ key: FILTER_OS_DESCRIPTION }), ]), @@ -535,12 +537,16 @@ describe('Quick Filters refetch behavior', () => { ); const { unmount } = render(); - expect(await screen.findByText(FILTER_SERVICE_NAME)).toBeInTheDocument(); + await expect( + screen.findByText(FILTER_SERVICE_NAME), + ).resolves.toBeInTheDocument(); unmount(); render(); - expect(await screen.findByText(FILTER_SERVICE_NAME)).toBeInTheDocument(); + await expect( + screen.findByText(FILTER_SERVICE_NAME), + ).resolves.toBeInTheDocument(); expect(getCalls).toBe(2); }); @@ -578,7 +584,9 @@ describe('Quick Filters refetch behavior', () => { const user = userEvent.setup({ pointerEventsCheck: 0 }); render(); - expect(await screen.findByText(FILTER_SERVICE_NAME)).toBeInTheDocument(); + await expect( + screen.findByText(FILTER_SERVICE_NAME), + 
).resolves.toBeInTheDocument(); const icon = await screen.findByTestId(SETTINGS_ICON_TEST_ID); const settingsButton = icon.closest('button') ?? icon; @@ -628,7 +636,9 @@ describe('Quick Filters refetch behavior', () => { const user = userEvent.setup({ pointerEventsCheck: 0 }); render(); - expect(await screen.findByText(FILTER_SERVICE_NAME)).toBeInTheDocument(); + await expect( + screen.findByText(FILTER_SERVICE_NAME), + ).resolves.toBeInTheDocument(); const icon = await screen.findByTestId(SETTINGS_ICON_TEST_ID); const settingsButton = icon.closest('button') ?? icon; @@ -657,6 +667,8 @@ describe('Quick Filters refetch behavior', () => { render(); - expect(await screen.findByText('No filters found')).toBeInTheDocument(); + await expect( + screen.findByText('No filters found'), + ).resolves.toBeInTheDocument(); }); }); diff --git a/frontend/src/components/RouteTab/RouteTab.test.tsx b/frontend/src/components/RouteTab/RouteTab.test.tsx index 9ea7799a48..1d604b50e7 100644 --- a/frontend/src/components/RouteTab/RouteTab.test.tsx +++ b/frontend/src/components/RouteTab/RouteTab.test.tsx @@ -28,7 +28,7 @@ const testRoutes: RouteTabProps['routes'] = [ ]; describe('RouteTab component', () => { - test('renders correctly', () => { + it('renders correctly', () => { const history = createMemoryHistory(); render( @@ -39,7 +39,7 @@ describe('RouteTab component', () => { expect(screen.getByRole('tab', { name: 'Tab2' })).toBeInTheDocument(); }); - test('renders correct number of tabs', () => { + it('renders correct number of tabs', () => { const history = createMemoryHistory(); render( @@ -47,10 +47,10 @@ describe('RouteTab component', () => { , ); const tabs = screen.getAllByRole('tab'); - expect(tabs.length).toBe(testRoutes.length); + expect(tabs).toHaveLength(testRoutes.length); }); - test('sets provided activeKey as active tab', () => { + it('sets provided activeKey as active tab', () => { const history = createMemoryHistory(); render( @@ -62,7 +62,7 @@ describe('RouteTab 
component', () => { ).toBeInTheDocument(); }); - test('navigates to correct route on tab click', () => { + it('navigates to correct route on tab click', () => { const history = createMemoryHistory(); render( @@ -74,7 +74,7 @@ describe('RouteTab component', () => { expect(history.location.pathname).toBe('/tab2'); }); - test('calls onChangeHandler on tab change', () => { + it('calls onChangeHandler on tab change', () => { const onChangeHandler = jest.fn(); const history = createMemoryHistory(); render( diff --git a/frontend/src/components/ServiceAccountDrawer/__tests__/EditKeyModal.test.tsx b/frontend/src/components/ServiceAccountDrawer/__tests__/EditKeyModal.test.tsx index 08a31a0bbf..3b973c8acc 100644 --- a/frontend/src/components/ServiceAccountDrawer/__tests__/EditKeyModal.test.tsx +++ b/frontend/src/components/ServiceAccountDrawer/__tests__/EditKeyModal.test.tsx @@ -70,9 +70,9 @@ describe('EditKeyModal (URL-controlled)', () => { it('renders key data from prop when edit-key param is set', async () => { renderModal(); - expect( - await screen.findByDisplayValue('Original Key Name'), - ).toBeInTheDocument(); + await expect( + screen.findByDisplayValue('Original Key Name'), + ).resolves.toBeInTheDocument(); expect(screen.getByRole('button', { name: /Save Changes/i })).toBeDisabled(); }); @@ -111,7 +111,7 @@ describe('EditKeyModal (URL-controlled)', () => { const latestUrlUpdate = onUrlUpdate.mock.calls[onUrlUpdate.mock.calls.length - 1]?.[0]; - expect(latestUrlUpdate).toEqual( + expect(latestUrlUpdate).toStrictEqual( expect.objectContaining({ queryString: expect.any(String), }), @@ -134,9 +134,9 @@ describe('EditKeyModal (URL-controlled)', () => { await user.click(screen.getByRole('button', { name: /Revoke Key/i })); // Same dialog, now showing revoke confirmation - expect( - await screen.findByRole('dialog', { name: /Revoke Original Key Name/i }), - ).toBeInTheDocument(); + await expect( + screen.findByRole('dialog', { name: /Revoke Original Key Name/i }), + 
).resolves.toBeInTheDocument(); expect( screen.getByText(/Revoking this key will permanently invalidate it/i), ).toBeInTheDocument(); diff --git a/frontend/src/components/ServiceAccountDrawer/__tests__/ServiceAccountDrawer.test.tsx b/frontend/src/components/ServiceAccountDrawer/__tests__/ServiceAccountDrawer.test.tsx index 08625f2ec4..bd0262200a 100644 --- a/frontend/src/components/ServiceAccountDrawer/__tests__/ServiceAccountDrawer.test.tsx +++ b/frontend/src/components/ServiceAccountDrawer/__tests__/ServiceAccountDrawer.test.tsx @@ -104,7 +104,9 @@ describe('ServiceAccountDrawer', () => { it('renders Overview tab by default: editable name input, locked email, Save disabled when not dirty', async () => { renderDrawer(); - expect(await screen.findByDisplayValue('CI Bot')).toBeInTheDocument(); + await expect( + screen.findByDisplayValue('CI Bot'), + ).resolves.toBeInTheDocument(); expect(screen.getByText('ci-bot@signoz.io')).toBeInTheDocument(); expect(screen.getByRole('button', { name: /Save Changes/i })).toBeDisabled(); }); @@ -272,11 +274,11 @@ describe('ServiceAccountDrawer', () => { renderDrawer(); - expect( - await screen.findByText( + await expect( + screen.findByText( /An unexpected error occurred while fetching service account details/i, ), - ).toBeInTheDocument(); + ).resolves.toBeInTheDocument(); }); }); @@ -349,11 +351,11 @@ describe('ServiceAccountDrawer – save-error UX', () => { await waitFor(() => expect(saveBtn).not.toBeDisabled()); await user.click(saveBtn); - expect( - await screen.findByText(/Name update.*name update failed/i, undefined, { + await expect( + screen.findByText(/Name update.*name update failed/i, undefined, { timeout: 5000, }), - ).toBeInTheDocument(); + ).resolves.toBeInTheDocument(); }); it('role add failure shows SaveErrorItem with the role name context', async () => { @@ -385,15 +387,11 @@ describe('ServiceAccountDrawer – save-error UX', () => { await waitFor(() => expect(saveBtn).not.toBeDisabled()); await user.click(saveBtn); - 
expect( - await screen.findByText( - /Role 'signoz-viewer'.*role assign failed/i, - undefined, - { - timeout: 5000, - }, - ), - ).toBeInTheDocument(); + await expect( + screen.findByText(/Role 'signoz-viewer'.*role assign failed/i, undefined, { + timeout: 5000, + }), + ).resolves.toBeInTheDocument(); }); it('role add retries on 429 then succeeds without showing an error', async () => { diff --git a/frontend/src/components/TanStackTableView/__tests__/TanStackCustomTableRow.test.tsx b/frontend/src/components/TanStackTableView/__tests__/TanStackCustomTableRow.test.tsx index 4ebc7fcd27..e145562fa0 100644 --- a/frontend/src/components/TanStackTableView/__tests__/TanStackCustomTableRow.test.tsx +++ b/frontend/src/components/TanStackTableView/__tests__/TanStackCustomTableRow.test.tsx @@ -63,7 +63,9 @@ describe('TanStackCustomTableRow', () => { , ); - expect(await screen.findByTestId('mocked-row-cells')).toBeInTheDocument(); + await expect( + screen.findByTestId('mocked-row-cells'), + ).resolves.toBeInTheDocument(); }); it('applies active class when isRowActive returns true', () => { diff --git a/frontend/src/components/TanStackTableView/__tests__/TanStackRow.test.tsx b/frontend/src/components/TanStackTableView/__tests__/TanStackRow.test.tsx index 662f44d9f2..a4212650cd 100644 --- a/frontend/src/components/TanStackTableView/__tests__/TanStackRow.test.tsx +++ b/frontend/src/components/TanStackTableView/__tests__/TanStackRow.test.tsx @@ -183,7 +183,7 @@ describe('TanStackRowCells', () => { , ); - expect(await screen.findByText('expanded-r1')).toBeInTheDocument(); + await expect(screen.findByText('expanded-r1')).resolves.toBeInTheDocument(); }); describe('new tab click', () => { diff --git a/frontend/src/components/TanStackTableView/__tests__/useColumnState.test.tsx b/frontend/src/components/TanStackTableView/__tests__/useColumnState.test.tsx index e8464a62c4..3f1086bab9 100644 --- a/frontend/src/components/TanStackTableView/__tests__/useColumnState.test.tsx +++ 
b/frontend/src/components/TanStackTableView/__tests__/useColumnState.test.tsx @@ -36,7 +36,7 @@ describe('useColumnState', () => { renderHook(() => useColumnState({ storageKey: TEST_KEY, columns })); const state = useColumnStore.getState().tables[TEST_KEY]; - expect(state.hiddenColumnIds).toEqual(['b']); + expect(state.hiddenColumnIds).toStrictEqual(['b']); }); it('does not initialize without storageKey', () => { @@ -61,7 +61,7 @@ describe('useColumnState', () => { useColumnState({ storageKey: TEST_KEY, columns }), ); - expect(result.current.columnVisibility).toEqual({ b: false }); + expect(result.current.columnVisibility).toStrictEqual({ b: false }); }); it('applies visibilityBehavior for grouped state', () => { @@ -79,13 +79,15 @@ describe('useColumnState', () => { const { result: notGrouped } = renderHook(() => useColumnState({ storageKey: TEST_KEY, columns, isGrouped: false }), ); - expect(notGrouped.current.columnVisibility).toEqual({ grouped: false }); + expect(notGrouped.current.columnVisibility).toStrictEqual({ + grouped: false, + }); // Grouped const { result: grouped } = renderHook(() => useColumnState({ storageKey: TEST_KEY, columns, isGrouped: true }), ); - expect(grouped.current.columnVisibility).toEqual({ ungrouped: false }); + expect(grouped.current.columnVisibility).toStrictEqual({ ungrouped: false }); }); it('combines store hidden + visibilityBehavior', () => { @@ -103,7 +105,10 @@ describe('useColumnState', () => { useColumnState({ storageKey: TEST_KEY, columns, isGrouped: true }), ); - expect(result.current.columnVisibility).toEqual({ a: false, b: false }); + expect(result.current.columnVisibility).toStrictEqual({ + a: false, + b: false, + }); }); }); @@ -119,7 +124,7 @@ describe('useColumnState', () => { useColumnState({ storageKey: TEST_KEY, columns }), ); - expect(result.current.sortedColumns.map((c) => c.id)).toEqual([ + expect(result.current.sortedColumns.map((c) => c.id)).toStrictEqual([ 'a', 'b', 'c', @@ -138,7 +143,7 @@ 
describe('useColumnState', () => { useColumnState({ storageKey: TEST_KEY, columns }), ); - expect(result.current.sortedColumns.map((c) => c.id)).toEqual([ + expect(result.current.sortedColumns.map((c) => c.id)).toStrictEqual([ 'c', 'a', 'b', @@ -157,7 +162,7 @@ describe('useColumnState', () => { useColumnState({ storageKey: TEST_KEY, columns }), ); - expect(result.current.sortedColumns.map((c) => c.id)).toEqual([ + expect(result.current.sortedColumns.map((c) => c.id)).toStrictEqual([ 'pinned', 'b', 'a', @@ -181,7 +186,7 @@ describe('useColumnState', () => { result.current.hideColumn('a'); }); - expect(result.current.columnVisibility).toEqual({ a: false }); + expect(result.current.columnVisibility).toStrictEqual({ a: false }); }); it('showColumn shows a column', () => { @@ -195,13 +200,13 @@ describe('useColumnState', () => { useColumnState({ storageKey: TEST_KEY, columns }), ); - expect(result.current.columnVisibility).toEqual({ a: false }); + expect(result.current.columnVisibility).toStrictEqual({ a: false }); act(() => { result.current.showColumn('a'); }); - expect(result.current.columnVisibility).toEqual({}); + expect(result.current.columnVisibility).toStrictEqual({}); }); it('setColumnSizing updates sizing', () => { @@ -219,7 +224,7 @@ describe('useColumnState', () => { result.current.setColumnSizing({ a: 200 }); }); - expect(result.current.columnSizing).toEqual({ a: 200 }); + expect(result.current.columnSizing).toStrictEqual({ a: 200 }); }); it('setColumnOrder updates order from column array', () => { @@ -237,7 +242,7 @@ describe('useColumnState', () => { result.current.setColumnOrder([col('c'), col('a'), col('b')]); }); - expect(result.current.sortedColumns.map((c) => c.id)).toEqual([ + expect(result.current.sortedColumns.map((c) => c.id)).toStrictEqual([ 'c', 'a', 'b', diff --git a/frontend/src/components/TanStackTableView/__tests__/useColumnStore.test.ts b/frontend/src/components/TanStackTableView/__tests__/useColumnStore.test.ts index 
e2c87f7920..0c8ac69147 100644 --- a/frontend/src/components/TanStackTableView/__tests__/useColumnStore.test.ts +++ b/frontend/src/components/TanStackTableView/__tests__/useColumnStore.test.ts @@ -29,9 +29,9 @@ describe('useColumnStore', () => { }); const state = useColumnStore.getState().tables[TEST_KEY]; - expect(state.hiddenColumnIds).toEqual(['b']); - expect(state.columnOrder).toEqual([]); - expect(state.columnSizing).toEqual({}); + expect(state.hiddenColumnIds).toStrictEqual(['b']); + expect(state.columnOrder).toStrictEqual([]); + expect(state.columnSizing).toStrictEqual({}); }); it('does not reinitialize if already exists', () => { @@ -124,7 +124,9 @@ describe('useColumnStore', () => { .getState() .setColumnSizing(TEST_KEY, { col1: 200, col2: 300 }); }); - expect(useColumnStore.getState().tables[TEST_KEY].columnSizing).toEqual({ + expect( + useColumnStore.getState().tables[TEST_KEY].columnSizing, + ).toStrictEqual({ col1: 200, col2: 300, }); @@ -144,11 +146,9 @@ describe('useColumnStore', () => { .getState() .setColumnOrder(TEST_KEY, ['col2', 'col1', 'col3']); }); - expect(useColumnStore.getState().tables[TEST_KEY].columnOrder).toEqual([ - 'col2', - 'col1', - 'col3', - ]); + expect(useColumnStore.getState().tables[TEST_KEY].columnOrder).toStrictEqual( + ['col2', 'col1', 'col3'], + ); }); }); @@ -172,9 +172,9 @@ describe('useColumnStore', () => { }); const state = useColumnStore.getState().tables[TEST_KEY]; - expect(state.hiddenColumnIds).toEqual(['a']); - expect(state.columnOrder).toEqual([]); - expect(state.columnSizing).toEqual({}); + expect(state.hiddenColumnIds).toStrictEqual(['a']); + expect(state.columnOrder).toStrictEqual([]); + expect(state.columnSizing).toStrictEqual({}); }); }); @@ -195,7 +195,7 @@ describe('useColumnStore', () => { }); const state = useColumnStore.getState().tables[TEST_KEY]; - expect(state.hiddenColumnIds).toEqual(['col1', 'col3']); + expect(state.hiddenColumnIds).toStrictEqual(['col1', 'col3']); 
expect(state.hiddenColumnIds).not.toContain('col2'); }); @@ -216,7 +216,7 @@ describe('useColumnStore', () => { }); const stateAfter = useColumnStore.getState().tables[TEST_KEY]; - expect(stateAfter.hiddenColumnIds).toEqual(hiddenBefore); + expect(stateAfter.hiddenColumnIds).toStrictEqual(hiddenBefore); }); it('does nothing for unknown storage key', () => { @@ -242,7 +242,7 @@ describe('useColumnStore', () => { }); const { result } = renderHook(() => useHiddenColumnIds(TEST_KEY)); - expect(result.current).toEqual(['a']); + expect(result.current).toStrictEqual(['a']); }); it('useHiddenColumnIds returns a stable snapshot for persisted state', () => { @@ -270,7 +270,7 @@ describe('useColumnStore', () => { }); const { result } = renderHook(() => useColumnSizing(TEST_KEY)); - expect(result.current).toEqual({ col1: 150 }); + expect(result.current).toStrictEqual({ col1: 150 }); }); it('useColumnOrder returns order', () => { @@ -280,7 +280,7 @@ describe('useColumnStore', () => { }); const { result } = renderHook(() => useColumnOrder(TEST_KEY)); - expect(result.current).toEqual(['c', 'b', 'a']); + expect(result.current).toStrictEqual(['c', 'b', 'a']); }); it('returns empty defaults for unknown storageKey', () => { @@ -288,9 +288,9 @@ describe('useColumnStore', () => { const { result: sizing } = renderHook(() => useColumnSizing('unknown')); const { result: order } = renderHook(() => useColumnOrder('unknown')); - expect(hidden.current).toEqual([]); - expect(sizing.current).toEqual({}); - expect(order.current).toEqual([]); + expect(hidden.current).toStrictEqual([]); + expect(sizing.current).toStrictEqual({}); + expect(order.current).toStrictEqual([]); }); }); }); diff --git a/frontend/src/components/TanStackTableView/__tests__/useTableParams.test.tsx b/frontend/src/components/TanStackTableView/__tests__/useTableParams.test.tsx index c30f037e11..ad279be5de 100644 --- a/frontend/src/components/TanStackTableView/__tests__/useTableParams.test.tsx +++ 
b/frontend/src/components/TanStackTableView/__tests__/useTableParams.test.tsx @@ -80,7 +80,10 @@ describe('useTableParams (local mode — enableQueryParams not set)', () => { act(() => { result.current.setOrderBy({ columnName: 'cpu', order: 'desc' }); }); - expect(result.current.orderBy).toEqual({ columnName: 'cpu', order: 'desc' }); + expect(result.current.orderBy).toStrictEqual({ + columnName: 'cpu', + order: 'desc', + }); }); }); @@ -143,7 +146,10 @@ describe('useTableParams (URL mode — enableQueryParams set)', () => { const orderBy = JSON.stringify({ columnName: 'name', order: 'desc' }); const wrapper = createNuqsWrapper({ order_by: orderBy }); const { result } = renderHook(() => useTableParams(true), { wrapper }); - expect(result.current.orderBy).toEqual({ columnName: 'name', order: 'desc' }); + expect(result.current.orderBy).toStrictEqual({ + columnName: 'name', + order: 'desc', + }); }); it('updates URL when setPage is called', () => { @@ -178,7 +184,7 @@ describe('useTableParams (URL mode — enableQueryParams set)', () => { .filter(Boolean) .pop(); expect(lastOrderBy).toBeDefined(); - expect(JSON.parse(lastOrderBy!)).toEqual({ + expect(JSON.parse(lastOrderBy!)).toStrictEqual({ columnName: 'value', order: 'asc', }); @@ -207,7 +213,7 @@ describe('useTableParams (URL mode — enableQueryParams set)', () => { result.current.setExpanded({ 'row-1': true }); }); - expect(result.current.expanded).toEqual({ 'row-1': true }); + expect(result.current.expanded).toStrictEqual({ 'row-1': true }); }); it('toggles sort order correctly: null → asc → desc → null', () => { @@ -222,13 +228,19 @@ describe('useTableParams (URL mode — enableQueryParams set)', () => { act(() => { result.current.setOrderBy({ columnName: 'id', order: 'asc' }); }); - expect(result.current.orderBy).toEqual({ columnName: 'id', order: 'asc' }); + expect(result.current.orderBy).toStrictEqual({ + columnName: 'id', + order: 'asc', + }); // Second click: asc → desc act(() => { result.current.setOrderBy({ 
columnName: 'id', order: 'desc' }); }); - expect(result.current.orderBy).toEqual({ columnName: 'id', order: 'desc' }); + expect(result.current.orderBy).toStrictEqual({ + columnName: 'id', + order: 'desc', + }); // Third click: desc → null act(() => { diff --git a/frontend/src/components/YAxisUnitSelector/__tests__/formatter.test.tsx b/frontend/src/components/YAxisUnitSelector/__tests__/formatter.test.tsx index 71e2c5bbd5..140b1cd77d 100644 --- a/frontend/src/components/YAxisUnitSelector/__tests__/formatter.test.tsx +++ b/frontend/src/components/YAxisUnitSelector/__tests__/formatter.test.tsx @@ -8,7 +8,7 @@ import { formatUniversalUnit } from '../formatter'; describe('formatUniversalUnit', () => { describe('Time', () => { - test.each([ + it.each([ // Days [31, UniversalYAxisUnit.DAYS, '4.43 weeks'], [7, UniversalYAxisUnit.DAYS, '1 week'], @@ -48,7 +48,7 @@ describe('formatUniversalUnit', () => { }); describe('Data', () => { - test.each([ + it.each([ // Bytes [864, UniversalYAxisUnit.BYTES, '864 B'], [1000, UniversalYAxisUnit.BYTES, '1 kB'], @@ -91,7 +91,7 @@ describe('formatUniversalUnit', () => { }); describe('Data rate', () => { - test.each([ + it.each([ // Bytes/second [864, UniversalYAxisUnit.BYTES_SECOND, '864 B/s'], [1000, UniversalYAxisUnit.BYTES_SECOND, '1 kB/s'], @@ -134,7 +134,7 @@ describe('formatUniversalUnit', () => { }); describe('Bit', () => { - test.each([ + it.each([ // Bits [1, UniversalYAxisUnit.BITS, '1 b'], [250, UniversalYAxisUnit.BITS, '250 b'], @@ -186,7 +186,7 @@ describe('formatUniversalUnit', () => { }); describe('Bit rate', () => { - test.each([ + it.each([ // Bits/second [512, UniversalYAxisUnit.BITS_SECOND, '512 b/s'], [1000, UniversalYAxisUnit.BITS_SECOND, '1 kb/s'], @@ -236,7 +236,7 @@ describe('formatUniversalUnit', () => { }); describe('Count', () => { - test.each([ + it.each([ [100, UniversalYAxisUnit.COUNT, '100'], [875, UniversalYAxisUnit.COUNT, '875'], [1000, UniversalYAxisUnit.COUNT, '1 K'], @@ -256,7 +256,7 @@ 
describe('formatUniversalUnit', () => { expect(formatUniversalUnit(value, unit)).toBe(expected); }); - test.each([ + it.each([ [100, UniversalYAxisUnit.COUNT_SECOND, '100 c/s'], [875, UniversalYAxisUnit.COUNT_SECOND, '875 c/s'], [1000, UniversalYAxisUnit.COUNT_SECOND, '1K c/s'], @@ -267,7 +267,7 @@ describe('formatUniversalUnit', () => { expect(formatUniversalUnit(value, unit)).toBe(expected); }); - test.each([ + it.each([ [100, UniversalYAxisUnit.COUNT_MINUTE, '100 c/m'], [875, UniversalYAxisUnit.COUNT_MINUTE, '875 c/m'], [1000, UniversalYAxisUnit.COUNT_MINUTE, '1K c/m'], @@ -280,7 +280,7 @@ describe('formatUniversalUnit', () => { }); describe('Operations units', () => { - test.each([ + it.each([ [780, UniversalYAxisUnit.OPS_SECOND, '780 ops/s'], [1000, UniversalYAxisUnit.OPS_SECOND, '1K ops/s'], [520, UniversalYAxisUnit.OPS_MINUTE, '520 ops/m'], @@ -297,7 +297,7 @@ describe('formatUniversalUnit', () => { }); describe('Request units', () => { - test.each([ + it.each([ [615, UniversalYAxisUnit.REQUESTS_SECOND, '615 req/s'], [1000, UniversalYAxisUnit.REQUESTS_SECOND, '1K req/s'], [480, UniversalYAxisUnit.REQUESTS_MINUTE, '480 req/m'], @@ -311,7 +311,7 @@ describe('formatUniversalUnit', () => { }); describe('Read/Write units', () => { - test.each([ + it.each([ [505, UniversalYAxisUnit.READS_SECOND, '505 rd/s'], [1000, UniversalYAxisUnit.READS_SECOND, '1K rd/s'], [610, UniversalYAxisUnit.WRITES_SECOND, '610 wr/s'], @@ -335,7 +335,7 @@ describe('formatUniversalUnit', () => { }); describe('IO Operations units', () => { - test.each([ + it.each([ [777, UniversalYAxisUnit.IOOPS_SECOND, '777 io/s'], [1000, UniversalYAxisUnit.IOOPS_SECOND, '1K io/s'], [2500, UniversalYAxisUnit.IOOPS_SECOND, '2.5K io/s'], @@ -363,7 +363,7 @@ describe('formatUniversalUnit', () => { }); describe('Time (additional)', () => { - test.each([ + it.each([ [900, UniversalYAxisUnit.DURATION_MS, '900 milliseconds'], [1000, UniversalYAxisUnit.DURATION_MS, '1 second'], [1, UniversalYAxisUnit.DURATION_MS, 
'1 millisecond'], @@ -388,7 +388,7 @@ describe('formatUniversalUnit', () => { }); describe('Data (IEC/Binary)', () => { - test.each([ + it.each([ // Bytes [900, UniversalYAxisUnit.BYTES_IEC, '900 B'], [1024, UniversalYAxisUnit.BYTES_IEC, '1 KiB'], @@ -430,7 +430,7 @@ describe('formatUniversalUnit', () => { }); describe('Data Rate (IEC/Binary)', () => { - test.each([ + it.each([ // Kibibytes/second [900, UniversalYAxisUnit.KIBIBYTES_SECOND, '900 KiB/s'], [1024, UniversalYAxisUnit.KIBIBYTES_SECOND, '1 MiB/s'], @@ -473,7 +473,7 @@ describe('formatUniversalUnit', () => { }); describe('Bits (IEC)', () => { - test.each([ + it.each([ [900, UniversalYAxisUnit.BITS_IEC, '900 b'], [1024, UniversalYAxisUnit.BITS_IEC, '1 Kib'], [1080, UniversalYAxisUnit.BITS_IEC, '1.05 Kib'], @@ -483,7 +483,7 @@ describe('formatUniversalUnit', () => { }); describe('Hash Rate', () => { - test.each([ + it.each([ // Hashes/second [412, UniversalYAxisUnit.HASH_RATE_HASHES_PER_SECOND, '412 H/s'], [1000, UniversalYAxisUnit.HASH_RATE_HASHES_PER_SECOND, '1 kH/s'], @@ -518,7 +518,7 @@ describe('formatUniversalUnit', () => { }); describe('Miscellaneous', () => { - test.each([ + it.each([ [742, UniversalYAxisUnit.MISC_STRING, '742'], [688, UniversalYAxisUnit.MISC_SHORT, '688'], [555, UniversalYAxisUnit.MISC_HUMIDITY, '555 %H'], @@ -534,7 +534,7 @@ describe('formatUniversalUnit', () => { }); describe('Acceleration', () => { - test.each([ + it.each([ [ 875, UniversalYAxisUnit.ACCELERATION_METERS_PER_SECOND_SQUARED, @@ -553,7 +553,7 @@ describe('formatUniversalUnit', () => { }); describe('Angular', () => { - test.each([ + it.each([ [415, UniversalYAxisUnit.ANGULAR_DEGREE, '415 °'], [732, UniversalYAxisUnit.ANGULAR_RADIAN, '732 rad'], [128, UniversalYAxisUnit.ANGULAR_GRADIAN, '128 grad'], @@ -565,7 +565,7 @@ describe('formatUniversalUnit', () => { }); describe('Area', () => { - test.each([ + it.each([ [210, UniversalYAxisUnit.AREA_SQUARE_METERS, '210 m²'], [152, UniversalYAxisUnit.AREA_SQUARE_FEET, '152 
ft²'], [64, UniversalYAxisUnit.AREA_SQUARE_MILES, '64 mi²'], @@ -575,7 +575,7 @@ describe('formatUniversalUnit', () => { }); describe('FLOPs', () => { - test.each([ + it.each([ // FLOPS [150, UniversalYAxisUnit.FLOPS_FLOPS, '150 FLOPS'], [1000, UniversalYAxisUnit.FLOPS_FLOPS, '1 kFLOPS'], @@ -613,7 +613,7 @@ describe('formatUniversalUnit', () => { }); describe('Concentration', () => { - test.each([ + it.each([ [415, UniversalYAxisUnit.CONCENTRATION_PPM, '415 ppm'], [1000, UniversalYAxisUnit.CONCENTRATION_PPM, '1000 ppm'], [732, UniversalYAxisUnit.CONCENTRATION_PPB, '732 ppb'], @@ -650,7 +650,7 @@ describe('formatUniversalUnit', () => { }); describe('Currency', () => { - test.each([ + it.each([ [812, UniversalYAxisUnit.CURRENCY_USD, '$812'], [645, UniversalYAxisUnit.CURRENCY_GBP, '£645'], [731, UniversalYAxisUnit.CURRENCY_EUR, '€731'], @@ -688,7 +688,7 @@ describe('formatUniversalUnit', () => { }); describe('Power/Electrical', () => { - test.each([ + it.each([ [715, UniversalYAxisUnit.POWER_WATT, '715 W'], [1000, UniversalYAxisUnit.POWER_WATT, '1 kW'], [1080, UniversalYAxisUnit.POWER_WATT, '1.08 kW'], @@ -744,7 +744,7 @@ describe('formatUniversalUnit', () => { }); describe('Flow', () => { - test.each([ + it.each([ [512, UniversalYAxisUnit.FLOW_GALLONS_PER_MINUTE, '512 gpm'], [1000, UniversalYAxisUnit.FLOW_GALLONS_PER_MINUTE, '1000 gpm'], [678, UniversalYAxisUnit.FLOW_CUBIC_METERS_PER_SECOND, '678 cms'], @@ -766,7 +766,7 @@ describe('formatUniversalUnit', () => { }); describe('Force', () => { - test.each([ + it.each([ [845, UniversalYAxisUnit.FORCE_NEWTON_METERS, '845 Nm'], [1000, UniversalYAxisUnit.FORCE_NEWTON_METERS, '1 kNm'], [1080, UniversalYAxisUnit.FORCE_NEWTON_METERS, '1.08 kNm'], @@ -782,7 +782,7 @@ describe('formatUniversalUnit', () => { }); describe('Mass', () => { - test.each([ + it.each([ [120, UniversalYAxisUnit.MASS_MILLIGRAM, '120 mg'], [120000, UniversalYAxisUnit.MASS_MILLIGRAM, '120 g'], [987, UniversalYAxisUnit.MASS_GRAM, '987 g'], @@ -796,7 +796,7 
@@ describe('formatUniversalUnit', () => { }); describe('Length', () => { - test.each([ + it.each([ [88, UniversalYAxisUnit.LENGTH_MILLIMETER, '88 mm'], [100, UniversalYAxisUnit.LENGTH_MILLIMETER, '100 mm'], [1000, UniversalYAxisUnit.LENGTH_MILLIMETER, '1 m'], @@ -812,7 +812,7 @@ describe('formatUniversalUnit', () => { }); describe('Pressure', () => { - test.each([ + it.each([ [45, UniversalYAxisUnit.PRESSURE_MILLIBAR, '45 mbar'], [1013, UniversalYAxisUnit.PRESSURE_MILLIBAR, '1.01 bar'], [27, UniversalYAxisUnit.PRESSURE_BAR, '27 bar'], @@ -828,7 +828,7 @@ describe('formatUniversalUnit', () => { }); describe('Radiation', () => { - test.each([ + it.each([ [452, UniversalYAxisUnit.RADIATION_BECQUEREL, '452 Bq'], [37, UniversalYAxisUnit.RADIATION_CURIE, '37 Ci'], [128, UniversalYAxisUnit.RADIATION_GRAY, '128 Gy'], @@ -849,7 +849,7 @@ describe('formatUniversalUnit', () => { }); describe('Rotation Speed', () => { - test.each([ + it.each([ [345, UniversalYAxisUnit.ROTATION_SPEED_REVOLUTIONS_PER_MINUTE, '345 rpm'], [789, UniversalYAxisUnit.ROTATION_SPEED_HERTZ, '789 Hz'], [789000, UniversalYAxisUnit.ROTATION_SPEED_HERTZ, '789 kHz'], @@ -861,7 +861,7 @@ describe('formatUniversalUnit', () => { }); describe('Temperature', () => { - test.each([ + it.each([ [37, UniversalYAxisUnit.TEMPERATURE_CELSIUS, '37 °C'], [451, UniversalYAxisUnit.TEMPERATURE_FAHRENHEIT, '451 °F'], [310, UniversalYAxisUnit.TEMPERATURE_KELVIN, '310 K'], @@ -871,7 +871,7 @@ describe('formatUniversalUnit', () => { }); describe('Velocity', () => { - test.each([ + it.each([ [900, UniversalYAxisUnit.VELOCITY_METERS_PER_SECOND, '900 m/s'], [456, UniversalYAxisUnit.VELOCITY_KILOMETERS_PER_HOUR, '456 km/h'], [789, UniversalYAxisUnit.VELOCITY_MILES_PER_HOUR, '789 mph'], @@ -882,7 +882,7 @@ describe('formatUniversalUnit', () => { }); describe('Volume', () => { - test.each([ + it.each([ [1200, UniversalYAxisUnit.VOLUME_MILLILITER, '1.2 L'], [9000000, UniversalYAxisUnit.VOLUME_MILLILITER, '9 kL'], [9, 
UniversalYAxisUnit.VOLUME_LITER, '9 L'], diff --git a/frontend/src/components/YAxisUnitSelector/__tests__/utils.test.tsx b/frontend/src/components/YAxisUnitSelector/__tests__/utils.test.tsx index ef7c8a0f89..5957dfc419 100644 --- a/frontend/src/components/YAxisUnitSelector/__tests__/utils.test.tsx +++ b/frontend/src/components/YAxisUnitSelector/__tests__/utils.test.tsx @@ -16,8 +16,8 @@ describe('YAxisUnitSelector utils', () => { it('returns null or self for unknown units', () => { expect(mapMetricUnitToUniversalUnit('unknown_unit')).toBe('unknown_unit'); - expect(mapMetricUnitToUniversalUnit('')).toBe(null); - expect(mapMetricUnitToUniversalUnit(undefined)).toBe(null); + expect(mapMetricUnitToUniversalUnit('')).toBeNull(); + expect(mapMetricUnitToUniversalUnit(undefined)).toBeNull(); }); }); @@ -62,7 +62,7 @@ describe('YAxisUnitSelector utils', () => { }, ]; const mergedCategories = mergeCategories(categories1, categories2); - expect(mergedCategories).toEqual([ + expect(mergedCategories).toStrictEqual([ { name: YAxisCategoryNames.Data, units: [ diff --git a/frontend/src/components/cmdKPalette/__test__/cmdkPalette.test.tsx b/frontend/src/components/cmdKPalette/__test__/cmdkPalette.test.tsx index deb5ea3124..fb748815da 100644 --- a/frontend/src/components/cmdKPalette/__test__/cmdkPalette.test.tsx +++ b/frontend/src/components/cmdKPalette/__test__/cmdkPalette.test.tsx @@ -149,7 +149,7 @@ describe('CmdKPalette', () => { jest.clearAllMocks(); }); - test('renders navigation and settings groups and items', () => { + it('renders navigation and settings groups and items', () => { render(); expect(screen.getByText('Navigation')).toBeInTheDocument(); @@ -160,7 +160,7 @@ describe('CmdKPalette', () => { expect(screen.getByText('Switch to Dark Mode')).toBeInTheDocument(); }); - test('clicking a navigation item calls history.push with correct route', async () => { + it('clicking a navigation item calls history.push with correct route', async () => { const user = 
userEvent.setup({ pointerEventsCheck: 0 }); render(); @@ -170,14 +170,14 @@ describe('CmdKPalette', () => { expect(history.push).toHaveBeenCalledWith(ROUTES.HOME); }); - test('role-based filtering (basic smoke)', () => { + it('role-based filtering (basic smoke)', () => { render(); // VIEWER still sees basic navigation items expect(screen.getByText(HOME_LABEL)).toBeInTheDocument(); }); - test('keyboard shortcut opens palette via setOpen', () => { + it('keyboard shortcut opens palette via setOpen', () => { render(); const event = new KeyboardEvent('keydown', { key: 'k', ctrlKey: true }); @@ -186,7 +186,7 @@ describe('CmdKPalette', () => { expect(mockSetOpen).toHaveBeenCalledWith(true); }); - test('items render with icons when provided', () => { + it('items render with icons when provided', () => { render(); const iconHolders = document.querySelectorAll('.cmd-item-icon'); @@ -194,7 +194,7 @@ describe('CmdKPalette', () => { expect(screen.getByText(HOME_LABEL)).toBeInTheDocument(); }); - test('closing the palette via handleInvoke sets open to false', async () => { + it('closing the palette via handleInvoke sets open to false', async () => { const user = userEvent.setup({ pointerEventsCheck: 0 }); render(); diff --git a/frontend/src/container/AllAlertChannels/__tests__/AlertChannels.test.tsx b/frontend/src/container/AllAlertChannels/__tests__/AlertChannels.test.tsx index 0af74e3d07..107713eb5b 100644 --- a/frontend/src/container/AllAlertChannels/__tests__/AlertChannels.test.tsx +++ b/frontend/src/container/AllAlertChannels/__tests__/AlertChannels.test.tsx @@ -34,13 +34,13 @@ describe('Alert Channels Settings List page', () => { jest.useRealTimers(); }); describe('Should display the Alert Channels page properly', () => { - it('Should check if "The alerts will be sent to all the configured channels." is visible ', () => { + it('Should check if "The alerts will be sent to all the configured channels." 
is visible', () => { expect(screen.getByText('sending_channels_note')).toBeInTheDocument(); }); - it('Should check if "New Alert Channel" Button is visble ', () => { + it('Should check if "New Alert Channel" Button is visble', () => { expect(screen.getByText('button_new_channel')).toBeInTheDocument(); }); - it('Should check if the help icon is visible and displays "tooltip_notification_channels ', async () => { + it('Should check if the help icon is visible and displays "tooltip_notification_channels', async () => { const helpIcon = screen.getByLabelText('question-circle'); fireEvent.mouseOver(helpIcon); diff --git a/frontend/src/container/AllAlertChannels/__tests__/AlertChannelsNormalUser.test.tsx b/frontend/src/container/AllAlertChannels/__tests__/AlertChannelsNormalUser.test.tsx index ce69c98b61..d5eccd51c1 100644 --- a/frontend/src/container/AllAlertChannels/__tests__/AlertChannelsNormalUser.test.tsx +++ b/frontend/src/container/AllAlertChannels/__tests__/AlertChannelsNormalUser.test.tsx @@ -38,7 +38,7 @@ describe('Alert Channels Settings List page (Normal User)', () => { jest.useRealTimers(); }); describe('Should display the Alert Channels page properly', () => { - it('Should check if "The alerts will be sent to all the configured channels." is visible ', async () => { + it('Should check if "The alerts will be sent to all the configured channels." 
is visible', async () => { await waitFor(() => expect(screen.getByText('sending_channels_note')).toBeInTheDocument(), ); @@ -51,7 +51,7 @@ describe('Alert Channels Settings List page (Normal User)', () => { await waitFor(() => expect(newAlertButton).toBeInTheDocument()); expect(newAlertButton).toBeDisabled(); }); - it('Should check if the help icon is visible and displays "tooltip_notification_channels ', async () => { + it('Should check if the help icon is visible and displays "tooltip_notification_channels', async () => { const helpIcon = screen.getByLabelText('question-circle'); fireEvent.mouseOver(helpIcon); diff --git a/frontend/src/container/AllAlertChannels/__tests__/CreateAlertChannel.test.tsx b/frontend/src/container/AllAlertChannels/__tests__/CreateAlertChannel.test.tsx index 63d142a744..83ab7e33ab 100644 --- a/frontend/src/container/AllAlertChannels/__tests__/CreateAlertChannel.test.tsx +++ b/frontend/src/container/AllAlertChannels/__tests__/CreateAlertChannel.test.tsx @@ -44,7 +44,7 @@ describe('Create Alert Channel', () => { afterEach(() => { jest.clearAllMocks(); }); - describe('Should check if the new alert channel is properly displayed with the cascading fields of slack channel ', () => { + describe('Should check if the new alert channel is properly displayed with the cascading fields of slack channel', () => { beforeEach(() => { render(); }); @@ -54,13 +54,13 @@ describe('Create Alert Channel', () => { it('Should check if the title is "New Notification Channels"', () => { expect(screen.getByText('page_title_create')).toBeInTheDocument(); }); - it('Should check if the name label and textbox are displayed properly ', () => { + it('Should check if the name label and textbox are displayed properly', () => { testLabelInputAndHelpValue({ labelText: 'field_channel_name', testId: 'channel-name-textbox', }); }); - it('Should check if Send resolved alerts label and checkbox are displayed properly ', () => { + it('Should check if Send resolved alerts label 
and checkbox are displayed properly', () => { testLabelInputAndHelpValue({ labelText: 'field_send_resolved', testId: 'field-send-resolved-checkbox', @@ -76,13 +76,13 @@ describe('Create Alert Channel', () => { it('Should check if the selected item in the type dropdown has text "Slack"', () => { expect(screen.getByText('Slack')).toBeInTheDocument(); }); - it('Should check if Webhook URL label and input are displayed properly ', () => { + it('Should check if Webhook URL label and input are displayed properly', () => { testLabelInputAndHelpValue({ labelText: 'field_webhook_url', testId: 'webhook-url-textbox', }); }); - it('Should check if Recepient label, input, and help text are displayed properly ', () => { + it('Should check if Recepient label, input, and help text are displayed properly', () => { testLabelInputAndHelpValue({ labelText: 'field_slack_recipient', testId: 'slack-channel-textbox', @@ -90,7 +90,7 @@ describe('Create Alert Channel', () => { }); }); - it('Should check if Title label and text area are displayed properly ', () => { + it('Should check if Title label and text area are displayed properly', () => { testLabelInputAndHelpValue({ labelText: 'field_slack_title', testId: 'title-textarea', @@ -101,7 +101,7 @@ describe('Create Alert Channel', () => { expect(titleTextArea).toHaveTextContent(slackTitleDefaultValue); }); - it('Should check if Description label and text area are displayed properly ', () => { + it('Should check if Description label and text area are displayed properly', () => { testLabelInputAndHelpValue({ labelText: 'field_slack_description', testId: 'description-textarea', @@ -177,13 +177,13 @@ describe('Create Alert Channel', () => { it('Should check if the selected item in the type dropdown has text "Webhook"', () => { expect(screen.getByText('Webhook')).toBeInTheDocument(); }); - it('Should check if Webhook URL label and input are displayed properly ', () => { + it('Should check if Webhook URL label and input are displayed properly', 
() => { testLabelInputAndHelpValue({ labelText: 'field_webhook_url', testId: 'webhook-url-textbox', }); }); - it('Should check if Webhook User Name label, input, and help text are displayed properly ', () => { + it('Should check if Webhook User Name label, input, and help text are displayed properly', () => { testLabelInputAndHelpValue({ labelText: 'field_webhook_username', testId: 'webhook-username-textbox', @@ -321,7 +321,7 @@ describe('Create Alert Channel', () => { }); }); - it('Should check if Message contains the default template ', () => { + it('Should check if Message contains the default template', () => { const messageTextArea = screen.getByTestId('opsgenie-message-textarea'); expect(messageTextArea).toHaveValue(opsGenieMessageDefaultValue); @@ -387,14 +387,14 @@ describe('Create Alert Channel', () => { expect(screen.getByText('Microsoft Teams')).toBeInTheDocument(); }); - it('Should check if Webhook URL label and input are displayed properly ', () => { + it('Should check if Webhook URL label and input are displayed properly', () => { testLabelInputAndHelpValue({ labelText: 'field_webhook_url', testId: 'webhook-url-textbox', }); }); - it('Should check if Title label and text area are displayed properly ', () => { + it('Should check if Title label and text area are displayed properly', () => { testLabelInputAndHelpValue({ labelText: 'field_slack_title', testId: 'title-textarea', @@ -406,7 +406,7 @@ describe('Create Alert Channel', () => { expect(titleTextArea).toHaveTextContent(slackTitleDefaultValue); }); - it('Should check if Description label and text area are displayed properly ', () => { + it('Should check if Description label and text area are displayed properly', () => { testLabelInputAndHelpValue({ labelText: 'field_slack_description', testId: 'description-textarea', diff --git a/frontend/src/container/AllAlertChannels/__tests__/CreateAlertChannelNormalUser.test.tsx 
b/frontend/src/container/AllAlertChannels/__tests__/CreateAlertChannelNormalUser.test.tsx index ac4ccb962e..f1b77f843a 100644 --- a/frontend/src/container/AllAlertChannels/__tests__/CreateAlertChannelNormalUser.test.tsx +++ b/frontend/src/container/AllAlertChannels/__tests__/CreateAlertChannelNormalUser.test.tsx @@ -23,20 +23,20 @@ describe('Create Alert Channel (Normal User)', () => { afterEach(() => { jest.clearAllMocks(); }); - describe('Should check if the new alert channel is properly displayed with the cascading fields of slack channel ', () => { + describe('Should check if the new alert channel is properly displayed with the cascading fields of slack channel', () => { beforeEach(() => { render(); }); it('Should check if the title is "New Notification Channels"', () => { expect(screen.getByText('page_title_create')).toBeInTheDocument(); }); - it('Should check if the name label and textbox are displayed properly ', () => { + it('Should check if the name label and textbox are displayed properly', () => { testLabelInputAndHelpValue({ labelText: 'field_channel_name', testId: 'channel-name-textbox', }); }); - it('Should check if Send resolved alerts label and checkbox are displayed properly ', () => { + it('Should check if Send resolved alerts label and checkbox are displayed properly', () => { testLabelInputAndHelpValue({ labelText: 'field_send_resolved', testId: 'field-send-resolved-checkbox', @@ -52,13 +52,13 @@ describe('Create Alert Channel (Normal User)', () => { it('Should check if the selected item in the type dropdown has text "Slack"', () => { expect(screen.getByText('Slack')).toBeInTheDocument(); }); - it('Should check if Webhook URL label and input are displayed properly ', () => { + it('Should check if Webhook URL label and input are displayed properly', () => { testLabelInputAndHelpValue({ labelText: 'field_webhook_url', testId: 'webhook-url-textbox', }); }); - it('Should check if Recepient label, input, and help text are displayed properly ', () => 
{ + it('Should check if Recepient label, input, and help text are displayed properly', () => { testLabelInputAndHelpValue({ labelText: 'field_slack_recipient', testId: 'slack-channel-textbox', @@ -66,7 +66,7 @@ describe('Create Alert Channel (Normal User)', () => { }); }); - it('Should check if Title label and text area are displayed properly ', () => { + it('Should check if Title label and text area are displayed properly', () => { testLabelInputAndHelpValue({ labelText: 'field_slack_title', testId: 'title-textarea', @@ -77,7 +77,7 @@ describe('Create Alert Channel (Normal User)', () => { expect(titleTextArea).toHaveTextContent(slackTitleDefaultValue); }); - it('Should check if Description label and text area are displayed properly ', () => { + it('Should check if Description label and text area are displayed properly', () => { testLabelInputAndHelpValue({ labelText: 'field_slack_description', testId: 'description-textarea', @@ -103,13 +103,13 @@ describe('Create Alert Channel (Normal User)', () => { it('Should check if the selected item in the type dropdown has text "Webhook"', () => { expect(screen.getByText('Webhook')).toBeInTheDocument(); }); - it('Should check if Webhook URL label and input are displayed properly ', () => { + it('Should check if Webhook URL label and input are displayed properly', () => { testLabelInputAndHelpValue({ labelText: 'field_webhook_url', testId: 'webhook-url-textbox', }); }); - it('Should check if Webhook User Name label, input, and help text are displayed properly ', () => { + it('Should check if Webhook User Name label, input, and help text are displayed properly', () => { testLabelInputAndHelpValue({ labelText: 'field_webhook_username', testId: 'webhook-username-textbox', @@ -247,7 +247,7 @@ describe('Create Alert Channel (Normal User)', () => { }); }); - it('Should check if Message contains the default template ', () => { + it('Should check if Message contains the default template', () => { const messageTextArea = 
screen.getByTestId('opsgenie-message-textarea'); expect(messageTextArea).toHaveValue(opsGenieMessageDefaultValue); diff --git a/frontend/src/container/AllAlertChannels/__tests__/EditAlertChannel.test.tsx b/frontend/src/container/AllAlertChannels/__tests__/EditAlertChannel.test.tsx index 0c58900be6..a0dac0e039 100644 --- a/frontend/src/container/AllAlertChannels/__tests__/EditAlertChannel.test.tsx +++ b/frontend/src/container/AllAlertChannels/__tests__/EditAlertChannel.test.tsx @@ -24,7 +24,7 @@ jest.mock('components/MarkdownRenderer/MarkdownRenderer', () => ({ MarkdownRenderer: jest.fn(() =>
Mocked MarkdownRenderer
), })); -describe('Should check if the edit alert channel is properly displayed ', () => { +describe('Should check if the edit alert channel is properly displayed', () => { beforeEach(() => { render(); }); @@ -35,14 +35,14 @@ describe('Should check if the edit alert channel is properly displayed ', () => expect(screen.getByText('page_title_edit')).toBeInTheDocument(); }); - it('Should check if the name label and textbox are displayed properly ', () => { + it('Should check if the name label and textbox are displayed properly', () => { testLabelInputAndHelpValue({ labelText: 'field_channel_name', testId: 'channel-name-textbox', value: 'Dummy-Channel', }); }); - it('Should check if Send resolved alerts label and checkbox are displayed properly and the checkbox is checked ', () => { + it('Should check if Send resolved alerts label and checkbox are displayed properly and the checkbox is checked', () => { testLabelInputAndHelpValue({ labelText: 'field_send_resolved', testId: 'field-send-resolved-checkbox', @@ -61,7 +61,7 @@ describe('Should check if the edit alert channel is properly displayed ', () => expect(screen.getByText('Slack')).toBeInTheDocument(); }); - it('Should check if Webhook URL label and input are displayed properly ', () => { + it('Should check if Webhook URL label and input are displayed properly', () => { testLabelInputAndHelpValue({ labelText: 'field_webhook_url', testId: 'webhook-url-textbox', @@ -70,7 +70,7 @@ describe('Should check if the edit alert channel is properly displayed ', () => }); }); - it('Should check if Recepient label, input, and help text are displayed properly ', () => { + it('Should check if Recepient label, input, and help text are displayed properly', () => { testLabelInputAndHelpValue({ labelText: 'field_slack_recipient', testId: 'slack-channel-textbox', @@ -79,7 +79,7 @@ describe('Should check if the edit alert channel is properly displayed ', () => }); }); - it('Should check if Title label and text area are displayed properly 
', () => { + it('Should check if Title label and text area are displayed properly', () => { testLabelInputAndHelpValue({ labelText: 'field_slack_title', testId: 'title-textarea', @@ -92,7 +92,7 @@ describe('Should check if the edit alert channel is properly displayed ', () => expect(titleTextArea).toHaveTextContent(slackTitleDefaultValue); }); - it('Should check if Description label and text area are displayed properly ', () => { + it('Should check if Description label and text area are displayed properly', () => { testLabelInputAndHelpValue({ labelText: 'field_slack_description', testId: 'description-textarea', diff --git a/frontend/src/container/AllError/utils.test.ts b/frontend/src/container/AllError/utils.test.ts index 344d318ebf..ec60a694f2 100644 --- a/frontend/src/container/AllError/utils.test.ts +++ b/frontend/src/container/AllError/utils.test.ts @@ -12,7 +12,7 @@ import { } from './utils'; describe('Error utils', () => { - test('Valid OrderBy Params', () => { + it('Valid OrderBy Params', () => { expect(isOrderParams('serviceName')).toBe(true); expect(isOrderParams('exceptionCount')).toBe(true); expect(isOrderParams('lastSeen')).toBe(true); @@ -20,24 +20,24 @@ describe('Error utils', () => { expect(isOrderParams('exceptionType')).toBe(true); }); - test('Invalid OrderBy Params', () => { + it('Invalid OrderBy Params', () => { expect(isOrderParams('invalid')).toBe(false); expect(isOrderParams(null)).toBe(false); expect(isOrderParams('')).toBe(false); }); - test('Valid Order', () => { + it('Valid Order', () => { expect(isOrder('ascending')).toBe(true); expect(isOrder('descending')).toBe(true); }); - test('Invalid Order', () => { + it('Invalid Order', () => { expect(isOrder('invalid')).toBe(false); expect(isOrder(null)).toBe(false); expect(isOrder('')).toBe(false); }); - test('Default Order', () => { + it('Default Order', () => { const OrderBy: OrderBy[] = [ 'exceptionCount', 'exceptionType', @@ -57,7 +57,7 @@ describe('Error utils', () => { }); }); - 
test('Limit', () => { + it('Limit', () => { expect(getLimit(null)).toBe(10); expect(getLimit('')).toBe(10); expect(getLimit('0')).toBe(0); @@ -68,7 +68,7 @@ describe('Error utils', () => { expect(getLimit('101')).toBe(101); }); - test('Update Page Size', () => { + it('Update Page Size', () => { expect(getUpdatePageSize(null)).toBe(10); expect(getUpdatePageSize('')).toBe(10); expect(getUpdatePageSize('0')).toBe(0); @@ -79,7 +79,7 @@ describe('Error utils', () => { expect(getUpdatePageSize('101')).toBe(101); }); - test('Order Params', () => { + it('Order Params', () => { expect(getOrderParams(null)).toBe('serviceName'); expect(getOrderParams('')).toBe('serviceName'); expect(getOrderParams('serviceName')).toBe('serviceName'); @@ -89,7 +89,7 @@ describe('Error utils', () => { expect(getOrderParams('exceptionType')).toBe('exceptionType'); }); - test('OffSet', () => { + it('OffSet', () => { expect(getOffSet(null)).toBe(0); expect(getOffSet('')).toBe(0); expect(getOffSet('0')).toBe(0); @@ -100,7 +100,7 @@ describe('Error utils', () => { expect(getOffSet('101')).toBe(101); }); - test('Order', () => { + it('Order', () => { expect(getOrder(null)).toBe('ascending'); expect(getOrder('')).toBe('ascending'); expect(getOrder('ascending')).toBe('ascending'); diff --git a/frontend/src/container/AnomalyAlertEvaluationView/AnomalyAlertEvaluationView.tsx b/frontend/src/container/AnomalyAlertEvaluationView/AnomalyAlertEvaluationView.tsx index 0275f0f85b..372d221404 100644 --- a/frontend/src/container/AnomalyAlertEvaluationView/AnomalyAlertEvaluationView.tsx +++ b/frontend/src/container/AnomalyAlertEvaluationView/AnomalyAlertEvaluationView.tsx @@ -30,18 +30,18 @@ function UplotChart({ useEffect(() => { if (plotInstance.current) { - // @ts-ignore + // @ts-expect-error plotInstance.current.destroy(); } if (data && data.length > 0) { - // @ts-ignore + // @ts-expect-error plotInstance.current = new uPlot(options, data, chartRef.current); } return (): void => { if (plotInstance.current) { - 
// @ts-ignore + // @ts-expect-error plotInstance.current.destroy(); } }; @@ -275,7 +275,7 @@ function AnomalyAlertEvaluationView({ }; const handleSearchValueChange = useDebouncedFn((event): void => { - // @ts-ignore + // @ts-expect-error const value = event?.target?.value || ''; handleSearch(value); diff --git a/frontend/src/container/ApiMonitoring/Explorer/Domains/DomainDetails/components/DomainMetrics.test.tsx b/frontend/src/container/ApiMonitoring/Explorer/Domains/DomainDetails/components/DomainMetrics.test.tsx index ac6e1e9368..ce61ec1cfb 100644 --- a/frontend/src/container/ApiMonitoring/Explorer/Domains/DomainDetails/components/DomainMetrics.test.tsx +++ b/frontend/src/container/ApiMonitoring/Explorer/Domains/DomainDetails/components/DomainMetrics.test.tsx @@ -233,7 +233,7 @@ describe('DomainMetrics - V5 Query Payload Tests', () => { // Wait for skeletons to disappear await waitFor(() => { const skeletons = document.querySelectorAll('.ant-skeleton-button'); - expect(skeletons.length).toBe(0); + expect(skeletons).toHaveLength(0); }); // Verify all metric labels are displayed @@ -272,7 +272,7 @@ describe('DomainMetrics - V5 Query Payload Tests', () => { await waitFor(() => { const skeletons = document.querySelectorAll('.ant-skeleton-button'); - expect(skeletons.length).toBe(0); + expect(skeletons).toHaveLength(0); }); // When no data, all values should show "-" diff --git a/frontend/src/container/ApiMonitoring/Explorer/Domains/DomainDetails/components/EndPointMetrics.test.tsx b/frontend/src/container/ApiMonitoring/Explorer/Domains/DomainDetails/components/EndPointMetrics.test.tsx index 7225ae1c01..4a42df0d6f 100644 --- a/frontend/src/container/ApiMonitoring/Explorer/Domains/DomainDetails/components/EndPointMetrics.test.tsx +++ b/frontend/src/container/ApiMonitoring/Explorer/Domains/DomainDetails/components/EndPointMetrics.test.tsx @@ -303,7 +303,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => { // Wait for skeletons to disappear await waitFor(() 
=> { const skeletons = document.querySelectorAll('.ant-skeleton-button'); - expect(skeletons.length).toBe(0); + expect(skeletons).toHaveLength(0); }); // Verify all metric labels are displayed @@ -342,7 +342,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => { await waitFor(() => { const skeletons = document.querySelectorAll('.ant-skeleton-button'); - expect(skeletons.length).toBe(0); + expect(skeletons).toHaveLength(0); }); // When no data, all values should show "-" diff --git a/frontend/src/container/ApiMonitoring/__tests__/APIMonitoringUtils.test.tsx b/frontend/src/container/ApiMonitoring/__tests__/APIMonitoringUtils.test.tsx index 4d7efe53ba..1afe5e5e3d 100644 --- a/frontend/src/container/ApiMonitoring/__tests__/APIMonitoringUtils.test.tsx +++ b/frontend/src/container/ApiMonitoring/__tests__/APIMonitoringUtils.test.tsx @@ -303,7 +303,7 @@ describe('API Monitoring Utils', () => { const result = extractPortAndEndpoint(url); // Assert - expect(result).toEqual({ + expect(result).toStrictEqual({ port: '8080', endpoint: '/api/endpoint?param=value', }); @@ -317,7 +317,7 @@ describe('API Monitoring Utils', () => { const result = extractPortAndEndpoint(url); // Assert - expect(result).toEqual({ + expect(result).toStrictEqual({ port: '-', endpoint: '/api/endpoint', }); @@ -331,7 +331,7 @@ describe('API Monitoring Utils', () => { const result = extractPortAndEndpoint(nonUrl); // Assert - expect(result).toEqual({ + expect(result).toStrictEqual({ port: '-', endpoint: nonUrl, }); @@ -379,7 +379,7 @@ describe('API Monitoring Utils', () => { const result = getFormattedEndPointDropDownData([]); // Assert - expect(result).toEqual([]); + expect(result).toStrictEqual([]); }); it('should handle undefined input', () => { @@ -392,7 +392,7 @@ describe('API Monitoring Utils', () => { // Assert // If the implementation doesn't handle undefined, just check that it returns something predictable // Based on the error, it seems the function returns undefined for undefined 
input - expect(result).toEqual([]); + expect(result).toStrictEqual([]); }); it('should handle items without URL path', () => { @@ -460,7 +460,7 @@ describe('API Monitoring Utils', () => { // Assert expect(result).toBeDefined(); - expect(result.length).toBe(2); + expect(result).toHaveLength(2); // Check first item expect(result[0].statusCode).toBe('200'); @@ -493,7 +493,7 @@ describe('API Monitoring Utils', () => { // Assert expect(result).toBeDefined(); - expect(result.length).toBe(1); + expect(result).toHaveLength(1); expect(result[0].statusCode).toBe('-'); expect(result[0].count).toBe('-'); expect(result[0].p99Latency).toBe('-'); @@ -506,7 +506,7 @@ describe('API Monitoring Utils', () => { // Assert expect(result).toBeDefined(); - expect(result).toEqual([]); + expect(result).toStrictEqual([]); }); it('should handle undefined input', () => { @@ -518,7 +518,7 @@ describe('API Monitoring Utils', () => { // Assert expect(result).toBeDefined(); - expect(result).toEqual([]); + expect(result).toStrictEqual([]); }); it('should handle mixed status code formats and preserve order', () => { @@ -555,7 +555,7 @@ describe('API Monitoring Utils', () => { // Assert expect(result).toBeDefined(); - expect(result.length).toBe(3); + expect(result).toHaveLength(3); // Check order preservation - should maintain the same order as input expect(result[0].statusCode).toBe('404'); @@ -675,7 +675,7 @@ describe('API Monitoring Utils', () => { // Assert expect(result).toBeDefined(); - expect(result.data.result).toEqual([]); + expect(result.data.result).toStrictEqual([]); }); it('should handle empty result array', () => { @@ -695,7 +695,7 @@ describe('API Monitoring Utils', () => { // Assert expect(result).toBeDefined(); - expect(result.data.result).toEqual([]); + expect(result.data.result).toStrictEqual([]); }); }); @@ -779,7 +779,7 @@ describe('API Monitoring Utils', () => { // Assert expect(result).toBeDefined(); - expect(result.length).toBe(2); + expect(result).toHaveLength(2); // Should 
have two filters, one for >= start code and one for <= end code const startRangeFilter = result.find((item) => item.op === '>='); @@ -811,7 +811,7 @@ describe('API Monitoring Utils', () => { // Assert expect(result).toBeDefined(); - expect(result.length).toBe(2); + expect(result).toHaveLength(2); const startRangeFilter = result.find((item) => item.op === '>='); const endRangeFilter = result.find((item) => item.op === '<='); @@ -832,7 +832,7 @@ describe('API Monitoring Utils', () => { // Assert expect(result).toBeDefined(); - expect(result).toEqual([]); + expect(result).toStrictEqual([]); }); it('should handle empty metric object', () => { @@ -841,7 +841,7 @@ describe('API Monitoring Utils', () => { // Assert expect(result).toBeDefined(); - expect(result).toEqual([]); + expect(result).toStrictEqual([]); }); it('should handle metric without response_status_code', () => { @@ -855,7 +855,7 @@ describe('API Monitoring Utils', () => { // Assert expect(result).toBeDefined(); - expect(result).toEqual([]); + expect(result).toStrictEqual([]); }); it('should handle unsupported status code range', () => { @@ -869,7 +869,7 @@ describe('API Monitoring Utils', () => { // Assert expect(result).toBeDefined(); - expect(result.length).toBe(2); + expect(result).toHaveLength(2); // Should still have two filters const startRangeFilter = result.find((item) => item.op === '>='); diff --git a/frontend/src/container/ApiMonitoring/__tests__/AllEndpointsWidgetV5Migration.test.tsx b/frontend/src/container/ApiMonitoring/__tests__/AllEndpointsWidgetV5Migration.test.tsx index 44991b94ca..7b75fc45d9 100644 --- a/frontend/src/container/ApiMonitoring/__tests__/AllEndpointsWidgetV5Migration.test.tsx +++ b/frontend/src/container/ApiMonitoring/__tests__/AllEndpointsWidgetV5Migration.test.tsx @@ -61,25 +61,29 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => { // Query A: count() expect(queryA.aggregations).toBeDefined(); expect(Array.isArray(queryA.aggregations)).toBe(true); - 
expect(queryA.aggregations).toEqual([{ expression: 'count()' }]); + expect(queryA.aggregations).toStrictEqual([{ expression: 'count()' }]); expect(queryA).not.toHaveProperty('aggregateAttribute'); // Query B: p99(duration_nano) expect(queryB.aggregations).toBeDefined(); expect(Array.isArray(queryB.aggregations)).toBe(true); - expect(queryB.aggregations).toEqual([{ expression: 'p99(duration_nano)' }]); + expect(queryB.aggregations).toStrictEqual([ + { expression: 'p99(duration_nano)' }, + ]); expect(queryB).not.toHaveProperty('aggregateAttribute'); // Query C: max(timestamp) expect(queryC.aggregations).toBeDefined(); expect(Array.isArray(queryC.aggregations)).toBe(true); - expect(queryC.aggregations).toEqual([{ expression: 'max(timestamp)' }]); + expect(queryC.aggregations).toStrictEqual([ + { expression: 'max(timestamp)' }, + ]); expect(queryC).not.toHaveProperty('aggregateAttribute'); // Query D: count() (disabled, for errors) expect(queryD.aggregations).toBeDefined(); expect(Array.isArray(queryD.aggregations)).toBe(true); - expect(queryD.aggregations).toEqual([{ expression: 'count()' }]); + expect(queryD.aggregations).toStrictEqual([{ expression: 'count()' }]); expect(queryD).not.toHaveProperty('aggregateAttribute'); }); diff --git a/frontend/src/container/ApiMonitoring/__tests__/EndpointDropdownV5Migration.test.tsx b/frontend/src/container/ApiMonitoring/__tests__/EndpointDropdownV5Migration.test.tsx index f37026dfd6..4d4f122fb7 100644 --- a/frontend/src/container/ApiMonitoring/__tests__/EndpointDropdownV5Migration.test.tsx +++ b/frontend/src/container/ApiMonitoring/__tests__/EndpointDropdownV5Migration.test.tsx @@ -60,7 +60,7 @@ describe('EndpointDropdown - V5 Migration Validation', () => { // V5 Aggregation format: aggregations array (not aggregateAttribute) expect(queryA.aggregations).toBeDefined(); expect(Array.isArray(queryA.aggregations)).toBe(true); - expect(queryA.aggregations?.[0]).toEqual({ + expect(queryA.aggregations?.[0]).toStrictEqual({ expression: 
'count()', }); expect(queryA).not.toHaveProperty('aggregateAttribute'); diff --git a/frontend/src/container/ApiMonitoring/__tests__/queryParams.test.tsx b/frontend/src/container/ApiMonitoring/__tests__/queryParams.test.tsx index f5a6eb31a7..39ebf0e379 100644 --- a/frontend/src/container/ApiMonitoring/__tests__/queryParams.test.tsx +++ b/frontend/src/container/ApiMonitoring/__tests__/queryParams.test.tsx @@ -19,7 +19,7 @@ describe('API Monitoring Query Params', () => { describe('getApiMonitoringParams', () => { it('returns default params when no query param exists', () => { const search = ''; - expect(getApiMonitoringParams(search)).toEqual(DEFAULT_PARAMS); + expect(getApiMonitoringParams(search)).toStrictEqual(DEFAULT_PARAMS); }); it('parses URL query params correctly', () => { @@ -52,7 +52,7 @@ describe('API Monitoring Query Params', () => { urlParams.set('apiMonitoringParams', 'invalid-json'); const search = `?${urlParams.toString()}`; - expect(getApiMonitoringParams(search)).toEqual(DEFAULT_PARAMS); + expect(getApiMonitoringParams(search)).toStrictEqual(DEFAULT_PARAMS); }); }); diff --git a/frontend/src/container/BillingContainer/BillingContainer.test.tsx b/frontend/src/container/BillingContainer/BillingContainer.test.tsx index 5152c9b855..0ebbd49398 100644 --- a/frontend/src/container/BillingContainer/BillingContainer.test.tsx +++ b/frontend/src/container/BillingContainer/BillingContainer.test.tsx @@ -20,7 +20,7 @@ window.ResizeObserver = describe('BillingContainer', () => { jest.setTimeout(30000); - test('Component should render', async () => { + it('Component should render', async () => { render(); const dataInjection = screen.getByRole('columnheader', { @@ -61,7 +61,7 @@ describe('BillingContainer', () => { jest.useRealTimers(); }); - test('OnTrail', async () => { + it('OnTrail', async () => { // Pin "now" so trial end (20 Oct 2023) is tomorrow => "1 days_remaining" render( @@ -73,17 +73,19 @@ describe('BillingContainer', () => { // If the component 
schedules any setTimeout on mount, flush them: jest.runOnlyPendingTimers(); - expect(await screen.findByText('Free Trial')).toBeInTheDocument(); - expect(await screen.findByText('billing')).toBeInTheDocument(); - expect(await screen.findByText(/\$0/i)).toBeInTheDocument(); + await expect(screen.findByText('Free Trial')).resolves.toBeInTheDocument(); + await expect(screen.findByText('billing')).resolves.toBeInTheDocument(); + await expect(screen.findByText(/\$0/i)).resolves.toBeInTheDocument(); - expect( - await screen.findByText( + await expect( + screen.findByText( /You are in free trial period. Your free trial will end on 20 Oct 2023/i, ), - ).toBeInTheDocument(); + ).resolves.toBeInTheDocument(); - expect(await screen.findByText(/1 days_remaining/i)).toBeInTheDocument(); + await expect( + screen.findByText(/1 days_remaining/i), + ).resolves.toBeInTheDocument(); const upgradeButtons = await screen.findAllByRole('button', { name: /upgrade_plan/i, @@ -91,13 +93,15 @@ describe('BillingContainer', () => { expect(upgradeButtons).toHaveLength(2); expect(upgradeButtons[1]).toBeInTheDocument(); - expect(await screen.findByText(/checkout_plans/i)).toBeInTheDocument(); - expect( - await screen.findByRole('link', { name: /here/i }), - ).toBeInTheDocument(); + await expect( + screen.findByText(/checkout_plans/i), + ).resolves.toBeInTheDocument(); + await expect( + screen.findByRole('link', { name: /here/i }), + ).resolves.toBeInTheDocument(); }); - test('OnTrail but trialConvertedToSubscription', async () => { + it('OnTrail but trialConvertedToSubscription', async () => { await act(async () => { render( , @@ -137,7 +141,7 @@ describe('BillingContainer', () => { }); }); - test('Not on ontrail', async () => { + it('Not on ontrail', async () => { const { findByText } = render( , {}, diff --git a/frontend/src/container/CreateAlertV2/EvaluationSettings/__tests__/utils.test.ts b/frontend/src/container/CreateAlertV2/EvaluationSettings/__tests__/utils.test.ts index 
5e03899552..183d1e3ef3 100644 --- a/frontend/src/container/CreateAlertV2/EvaluationSettings/__tests__/utils.test.ts +++ b/frontend/src/container/CreateAlertV2/EvaluationSettings/__tests__/utils.test.ts @@ -242,7 +242,7 @@ describe('utils', () => { ); expect(rrulestr).toHaveBeenCalledWith(FREQ_DAILY); - expect(result).toEqual([ + expect(result).toStrictEqual([ new Date(MOCK_DATE_STRING), new Date('2024-01-16T10:30:00Z'), new Date('2024-01-17T10:30:00Z'), @@ -300,7 +300,7 @@ describe('utils', () => { expect(result).toBeDefined(); expect(Array.isArray(result)).toBe(true); - expect(result?.map((res) => formatDate(res))).toEqual([ + expect(result?.map((res) => formatDate(res))).toStrictEqual([ '15-01-2024 10:30:00', '01-02-2024 10:30:00', '15-02-2024 10:30:00', @@ -319,7 +319,7 @@ describe('utils', () => { expect(result).toBeDefined(); expect(Array.isArray(result)).toBe(true); - expect(result?.map((res) => formatDate(res))).toEqual([ + expect(result?.map((res) => formatDate(res))).toStrictEqual([ '15-01-2024 12:30:00', '19-01-2024 12:30:00', '22-01-2024 12:30:00', @@ -339,7 +339,7 @@ describe('utils', () => { expect(result).toBeDefined(); expect(Array.isArray(result)).toBe(true); // today included (15-01-2024 00:30:00) - expect(result?.map((res) => formatDate(res))).toEqual([ + expect(result?.map((res) => formatDate(res))).toStrictEqual([ '15-01-2024 10:30:00', '19-01-2024 10:30:00', '22-01-2024 10:30:00', @@ -359,7 +359,7 @@ describe('utils', () => { expect(result).toBeDefined(); expect(Array.isArray(result)).toBe(true); // today excluded (15-01-2024 00:30:00) - expect(result?.map((res) => formatDate(res))).toEqual([ + expect(result?.map((res) => formatDate(res))).toStrictEqual([ '19-01-2024 00:00:00', '22-01-2024 00:00:00', '26-01-2024 00:00:00', @@ -379,7 +379,7 @@ describe('utils', () => { expect(result).toBeDefined(); expect(Array.isArray(result)).toBe(true); // today excluded (15-01-2024 00:30:00) - expect(result?.map((res) => formatDate(res))).toEqual([ + 
expect(result?.map((res) => formatDate(res))).toStrictEqual([ '19-01-2024 00:30:00', '22-01-2024 00:30:00', '26-01-2024 00:30:00', @@ -398,7 +398,7 @@ describe('utils', () => { expect(result).toBeDefined(); expect(Array.isArray(result)).toBe(true); // today included (15-01-2024 10:30:00) - expect(result?.map((res) => formatDate(res))).toEqual([ + expect(result?.map((res) => formatDate(res))).toStrictEqual([ '15-01-2024 10:30:00', '15-02-2024 10:30:00', '15-03-2024 10:30:00', @@ -417,7 +417,7 @@ describe('utils', () => { expect(result).toBeDefined(); expect(Array.isArray(result)).toBe(true); // today excluded (15-01-2024 10:30:00) - expect(result?.map((res) => formatDate(res))).toEqual([ + expect(result?.map((res) => formatDate(res))).toStrictEqual([ '15-02-2024 00:00:00', '15-03-2024 00:00:00', '15-04-2024 00:00:00', @@ -436,7 +436,7 @@ describe('utils', () => { expect(result).toBeDefined(); expect(Array.isArray(result)).toBe(true); // today excluded (15-01-2024 10:30:00) - expect(result?.map((res) => formatDate(res))).toEqual([ + expect(result?.map((res) => formatDate(res))).toStrictEqual([ '15-02-2024 00:30:00', '15-03-2024 00:30:00', '15-04-2024 00:30:00', @@ -455,7 +455,7 @@ describe('utils', () => { expect(result).toBeDefined(); expect(Array.isArray(result)).toBe(true); - expect(result?.map((res) => formatDate(res))).toEqual([ + expect(result?.map((res) => formatDate(res))).toStrictEqual([ '29-01-2024 10:30:00', '29-02-2024 10:30:00', '29-03-2024 10:30:00', @@ -474,7 +474,7 @@ describe('utils', () => { expect(result).toBeDefined(); expect(Array.isArray(result)).toBe(true); - expect(result?.map((res) => formatDate(res))).toEqual([ + expect(result?.map((res) => formatDate(res))).toStrictEqual([ '31-01-2024 10:30:00', '31-03-2024 10:30:00', '31-05-2024 10:30:00', @@ -511,7 +511,7 @@ describe('utils', () => { 5, ); - expect(result?.map((res) => formatDate(res))).toEqual([ + expect(result?.map((res) => formatDate(res))).toStrictEqual([ '29-01-2023 10:30:00', 
'29-03-2023 10:30:00', '29-04-2023 10:30:00', @@ -529,7 +529,7 @@ describe('utils', () => { ); expect(result).toBeDefined(); expect(Array.isArray(result)).toBe(true); - expect(result?.map((res) => formatDate(res))).toEqual([ + expect(result?.map((res) => formatDate(res))).toStrictEqual([ '15-01-2024 10:40:00', '16-01-2024 10:40:00', '17-01-2024 10:40:00', @@ -547,7 +547,7 @@ describe('utils', () => { ); expect(result).toBeDefined(); expect(Array.isArray(result)).toBe(true); - expect(result?.map((res) => formatDate(res))).toEqual([ + expect(result?.map((res) => formatDate(res))).toStrictEqual([ '16-01-2024 00:00:00', '17-01-2024 00:00:00', '18-01-2024 00:00:00', @@ -565,7 +565,7 @@ describe('utils', () => { ); expect(result).toBeDefined(); expect(Array.isArray(result)).toBe(true); - expect(result?.map((res) => formatDate(res))).toEqual([ + expect(result?.map((res) => formatDate(res))).toStrictEqual([ '16-01-2024 00:30:00', '17-01-2024 00:30:00', '18-01-2024 00:30:00', @@ -583,7 +583,7 @@ describe('utils', () => { ); expect(result).toBeDefined(); expect(Array.isArray(result)).toBe(true); - expect(result?.map((res) => formatDate(res))).toEqual([ + expect(result?.map((res) => formatDate(res))).toStrictEqual([ '15-01-2024 10:30:00', '16-01-2024 10:30:00', '17-01-2024 10:30:00', @@ -621,7 +621,7 @@ describe('utils', () => { ); expect(result).toBeDefined(); expect(Array.isArray(result)).toBe(true); - expect(result?.map((res) => formatDate(res))).toEqual([ + expect(result?.map((res) => formatDate(res))).toStrictEqual([ '31-01-2024 10:30:00', '01-02-2024 10:30:00', '02-02-2024 10:30:00', diff --git a/frontend/src/container/CreateAlertV2/QuerySection/__tests__/QuerySection.test.tsx b/frontend/src/container/CreateAlertV2/QuerySection/__tests__/QuerySection.test.tsx index 3a35ecc308..e87adf0e74 100644 --- a/frontend/src/container/CreateAlertV2/QuerySection/__tests__/QuerySection.test.tsx +++ b/frontend/src/container/CreateAlertV2/QuerySection/__tests__/QuerySection.test.tsx @@ 
-333,7 +333,7 @@ describe('QuerySection', () => { const result = mockUseQueryBuilder.redirectWithQueryBuilderData.mock.calls[0]; - expect(result[0]).toEqual({ + expect(result[0]).toStrictEqual({ id: MOCK_UUID, queryType: EQueryType.QUERY_BUILDER, unit: undefined, @@ -351,7 +351,7 @@ describe('QuerySection', () => { clickhouse_sql: [initialClickHouseData], }); - expect(result[1]).toEqual({ + expect(result[1]).toStrictEqual({ [QueryParams.alertType]: AlertTypes.METRICS_BASED_ALERT, [QueryParams.ruleType]: AlertDetectionTypes.THRESHOLD_ALERT, }); @@ -371,7 +371,7 @@ describe('QuerySection', () => { const [queryArg] = mockUseQueryBuilder.redirectWithQueryBuilderData.mock.calls[0]; - expect(queryArg).toEqual({ + expect(queryArg).toStrictEqual({ ...mockUseQueryBuilder.currentQuery, queryType: EQueryType.PROM, }); @@ -425,7 +425,7 @@ describe('QuerySection', () => { const [queryArg] = mockUseQueryBuilder.redirectWithQueryBuilderData.mock.calls[0]; - expect(queryArg).toEqual({ + expect(queryArg).toStrictEqual({ ...mockCurrentQueryWithPromQL, queryType: EQueryType.QUERY_BUILDER, }); @@ -479,7 +479,7 @@ describe('QuerySection', () => { const [queryArg] = mockUseQueryBuilder.redirectWithQueryBuilderData.mock.calls[0]; - expect(queryArg).toEqual({ + expect(queryArg).toStrictEqual({ ...mockCurrentQueryWithClickhouseSQL, queryType: EQueryType.QUERY_BUILDER, }); diff --git a/frontend/src/container/CreateAlertV2/context/__tests__/utils.test.tsx b/frontend/src/container/CreateAlertV2/context/__tests__/utils.test.tsx index b41d918bac..75149cc1c6 100644 --- a/frontend/src/container/CreateAlertV2/context/__tests__/utils.test.tsx +++ b/frontend/src/container/CreateAlertV2/context/__tests__/utils.test.tsx @@ -54,7 +54,7 @@ describe('CreateAlertV2 Context Utils', () => { type: 'SET_ALERT_NAME', payload: 'Test Alert', }); - expect(result).toEqual({ + expect(result).toStrictEqual({ ...INITIAL_ALERT_STATE, name: 'Test Alert', }); @@ -66,7 +66,7 @@ describe('CreateAlertV2 Context Utils', () 
=> { type: 'SET_ALERT_LABELS', payload: labels, }); - expect(result).toEqual({ + expect(result).toStrictEqual({ ...INITIAL_ALERT_STATE, labels, }); @@ -77,7 +77,7 @@ describe('CreateAlertV2 Context Utils', () => { type: 'SET_Y_AXIS_UNIT', payload: 'ms', }); - expect(result).toEqual({ + expect(result).toStrictEqual({ ...INITIAL_ALERT_STATE, yAxisUnit: 'ms', }); @@ -90,7 +90,7 @@ describe('CreateAlertV2 Context Utils', () => { yAxisUnit: 'ms', }; const result = alertCreationReducer(modifiedState, { type: 'RESET' }); - expect(result).toEqual(INITIAL_ALERT_STATE); + expect(result).toStrictEqual(INITIAL_ALERT_STATE); }); it(TEST_SET_INITIAL_STATE_FROM_PAYLOAD, () => { @@ -103,7 +103,7 @@ describe('CreateAlertV2 Context Utils', () => { type: 'SET_INITIAL_STATE', payload: newState, }); - expect(result).toEqual(newState); + expect(result).toStrictEqual(newState); }); it(TEST_RETURN_STATE_FOR_UNKNOWN_ACTION, () => { @@ -112,7 +112,7 @@ describe('CreateAlertV2 Context Utils', () => { { type: UNKNOWN_ACTION_TYPE } as any, ); - expect(result).toEqual(INITIAL_ALERT_STATE); + expect(result).toStrictEqual(INITIAL_ALERT_STATE); }); }); @@ -211,7 +211,7 @@ describe('CreateAlertV2 Context Utils', () => { type: 'SET_SELECTED_QUERY', payload: 'B', }); - expect(result).toEqual({ + expect(result).toStrictEqual({ ...INITIAL_ALERT_THRESHOLD_STATE, selectedQuery: 'B', }); @@ -222,7 +222,7 @@ describe('CreateAlertV2 Context Utils', () => { type: 'SET_OPERATOR', payload: AlertThresholdOperator.IS_BELOW, }); - expect(result).toEqual({ + expect(result).toStrictEqual({ ...INITIAL_ALERT_THRESHOLD_STATE, operator: AlertThresholdOperator.IS_BELOW, }); @@ -233,7 +233,7 @@ describe('CreateAlertV2 Context Utils', () => { type: 'SET_MATCH_TYPE', payload: AlertThresholdMatchType.ALL_THE_TIME, }); - expect(result).toEqual({ + expect(result).toStrictEqual({ ...INITIAL_ALERT_THRESHOLD_STATE, matchType: AlertThresholdMatchType.ALL_THE_TIME, }); @@ -255,7 +255,7 @@ describe('CreateAlertV2 Context Utils', () 
=> { type: 'SET_THRESHOLDS', payload: newThresholds, }); - expect(result).toEqual({ + expect(result).toStrictEqual({ ...INITIAL_ALERT_THRESHOLD_STATE, thresholds: newThresholds, }); @@ -272,7 +272,7 @@ describe('CreateAlertV2 Context Utils', () => { thresholds: [], }; const result = alertThresholdReducer(modifiedState, { type: 'RESET' }); - expect(result).toEqual(INITIAL_ALERT_THRESHOLD_STATE); + expect(result).toStrictEqual(INITIAL_ALERT_THRESHOLD_STATE); }); it(TEST_SET_INITIAL_STATE_FROM_PAYLOAD, () => { @@ -289,7 +289,7 @@ describe('CreateAlertV2 Context Utils', () => { type: 'SET_INITIAL_STATE', payload: newState, }); - expect(result).toEqual(newState); + expect(result).toStrictEqual(newState); }); it(TEST_RETURN_STATE_FOR_UNKNOWN_ACTION, () => { @@ -298,7 +298,7 @@ describe('CreateAlertV2 Context Utils', () => { { type: UNKNOWN_ACTION_TYPE } as any, ); - expect(result).toEqual(INITIAL_ALERT_THRESHOLD_STATE); + expect(result).toStrictEqual(INITIAL_ALERT_THRESHOLD_STATE); }); }); @@ -308,7 +308,7 @@ describe('CreateAlertV2 Context Utils', () => { type: 'SET_SEND_NOTIFICATION_IF_DATA_IS_MISSING', payload: { toleranceLimit: 21, timeUnit: UniversalYAxisUnit.HOURS }, }); - expect(result).toEqual({ + expect(result).toStrictEqual({ ...INITIAL_ADVANCED_OPTIONS_STATE, sendNotificationIfDataIsMissing: { ...INITIAL_ADVANCED_OPTIONS_STATE.sendNotificationIfDataIsMissing, @@ -323,7 +323,7 @@ describe('CreateAlertV2 Context Utils', () => { type: 'TOGGLE_SEND_NOTIFICATION_IF_DATA_IS_MISSING', payload: true, }); - expect(result).toEqual({ + expect(result).toStrictEqual({ ...INITIAL_ADVANCED_OPTIONS_STATE, sendNotificationIfDataIsMissing: { ...INITIAL_ADVANCED_OPTIONS_STATE.sendNotificationIfDataIsMissing, @@ -337,7 +337,7 @@ describe('CreateAlertV2 Context Utils', () => { type: 'SET_ENFORCE_MINIMUM_DATAPOINTS', payload: { minimumDatapoints: 10 }, }); - expect(result).toEqual({ + expect(result).toStrictEqual({ ...INITIAL_ADVANCED_OPTIONS_STATE, enforceMinimumDatapoints: { 
...INITIAL_ADVANCED_OPTIONS_STATE.enforceMinimumDatapoints, @@ -351,7 +351,7 @@ describe('CreateAlertV2 Context Utils', () => { type: 'TOGGLE_ENFORCE_MINIMUM_DATAPOINTS', payload: true, }); - expect(result).toEqual({ + expect(result).toStrictEqual({ ...INITIAL_ADVANCED_OPTIONS_STATE, enforceMinimumDatapoints: { ...INITIAL_ADVANCED_OPTIONS_STATE.enforceMinimumDatapoints, @@ -365,7 +365,7 @@ describe('CreateAlertV2 Context Utils', () => { type: 'SET_DELAY_EVALUATION', payload: { delay: 10, timeUnit: UniversalYAxisUnit.HOURS }, }); - expect(result).toEqual({ + expect(result).toStrictEqual({ ...INITIAL_ADVANCED_OPTIONS_STATE, delayEvaluation: { delay: 10, timeUnit: UniversalYAxisUnit.HOURS }, }); @@ -386,7 +386,7 @@ describe('CreateAlertV2 Context Utils', () => { type: 'SET_EVALUATION_CADENCE', payload: newCadence, }); - expect(result).toEqual({ + expect(result).toStrictEqual({ ...INITIAL_ADVANCED_OPTIONS_STATE, evaluationCadence: { ...INITIAL_ADVANCED_OPTIONS_STATE.evaluationCadence, @@ -400,7 +400,7 @@ describe('CreateAlertV2 Context Utils', () => { type: 'SET_EVALUATION_CADENCE_MODE', payload: 'custom', }); - expect(result).toEqual({ + expect(result).toStrictEqual({ ...INITIAL_ADVANCED_OPTIONS_STATE, evaluationCadence: { ...INITIAL_ADVANCED_OPTIONS_STATE.evaluationCadence, @@ -415,7 +415,7 @@ describe('CreateAlertV2 Context Utils', () => { delayEvaluation: { delay: 10, timeUnit: UniversalYAxisUnit.HOURS }, }; const result = advancedOptionsReducer(modifiedState, { type: 'RESET' }); - expect(result).toEqual(INITIAL_ADVANCED_OPTIONS_STATE); + expect(result).toStrictEqual(INITIAL_ADVANCED_OPTIONS_STATE); }); it(TEST_SET_INITIAL_STATE_FROM_PAYLOAD, () => { @@ -431,7 +431,7 @@ describe('CreateAlertV2 Context Utils', () => { type: 'SET_INITIAL_STATE', payload: newState, }); - expect(result).toEqual(newState); + expect(result).toStrictEqual(newState); }); it(TEST_RETURN_STATE_FOR_UNKNOWN_ACTION, () => { @@ -440,7 +440,7 @@ describe('CreateAlertV2 Context Utils', () => { { 
type: UNKNOWN_ACTION_TYPE } as any, ); - expect(result).toEqual(INITIAL_ADVANCED_OPTIONS_STATE); + expect(result).toStrictEqual(INITIAL_ADVANCED_OPTIONS_STATE); }); }); @@ -455,7 +455,7 @@ describe('CreateAlertV2 Context Utils', () => { type: 'SET_WINDOW_TYPE', payload: 'rolling', }); - expect(result).toEqual({ + expect(result).toStrictEqual({ windowType: 'rolling', timeframe: INITIAL_EVALUATION_WINDOW_STATE.timeframe, startingAt: INITIAL_EVALUATION_WINDOW_STATE.startingAt, @@ -467,7 +467,7 @@ describe('CreateAlertV2 Context Utils', () => { type: 'SET_WINDOW_TYPE', payload: 'cumulative', }); - expect(result).toEqual({ + expect(result).toStrictEqual({ windowType: 'cumulative', timeframe: 'currentHour', startingAt: INITIAL_EVALUATION_WINDOW_STATE.startingAt, @@ -479,7 +479,7 @@ describe('CreateAlertV2 Context Utils', () => { type: 'SET_TIMEFRAME', payload: '10m0s', }); - expect(result).toEqual({ + expect(result).toStrictEqual({ ...INITIAL_EVALUATION_WINDOW_STATE, timeframe: '10m0s', }); @@ -496,7 +496,7 @@ describe('CreateAlertV2 Context Utils', () => { type: 'SET_STARTING_AT', payload: newStartingAt, }); - expect(result).toEqual({ + expect(result).toStrictEqual({ ...INITIAL_EVALUATION_WINDOW_STATE, startingAt: newStartingAt, }); @@ -514,7 +514,7 @@ describe('CreateAlertV2 Context Utils', () => { }, }; const result = evaluationWindowReducer(modifiedState, { type: 'RESET' }); - expect(result).toEqual(INITIAL_EVALUATION_WINDOW_STATE); + expect(result).toStrictEqual(INITIAL_EVALUATION_WINDOW_STATE); }); it(TEST_SET_INITIAL_STATE_FROM_PAYLOAD, () => { @@ -532,7 +532,7 @@ describe('CreateAlertV2 Context Utils', () => { type: 'SET_INITIAL_STATE', payload: newState, }); - expect(result).toEqual(newState); + expect(result).toStrictEqual(newState); }); it(TEST_RETURN_STATE_FOR_UNKNOWN_ACTION, () => { @@ -541,7 +541,7 @@ describe('CreateAlertV2 Context Utils', () => { { type: UNKNOWN_ACTION_TYPE } as any, ); - expect(result).toEqual(INITIAL_EVALUATION_WINDOW_STATE); + 
expect(result).toStrictEqual(INITIAL_EVALUATION_WINDOW_STATE); }); }); @@ -555,7 +555,7 @@ describe('CreateAlertV2 Context Utils', () => { payload: notifications, }, ); - expect(result).toEqual({ + expect(result).toStrictEqual({ ...INITIAL_NOTIFICATION_SETTINGS_STATE, multipleNotifications: notifications, }); @@ -570,7 +570,7 @@ describe('CreateAlertV2 Context Utils', () => { type: 'SET_MULTIPLE_NOTIFICATIONS', payload: null, }); - expect(result).toEqual({ + expect(result).toStrictEqual({ ...modifiedState, multipleNotifications: null, }); @@ -590,7 +590,7 @@ describe('CreateAlertV2 Context Utils', () => { payload: reNotification, }, ); - expect(result).toEqual({ + expect(result).toStrictEqual({ ...INITIAL_NOTIFICATION_SETTINGS_STATE, reNotification, }); @@ -605,7 +605,7 @@ describe('CreateAlertV2 Context Utils', () => { payload: description, }, ); - expect(result).toEqual({ + expect(result).toStrictEqual({ ...INITIAL_NOTIFICATION_SETTINGS_STATE, description, }); @@ -619,7 +619,7 @@ describe('CreateAlertV2 Context Utils', () => { payload: true, }, ); - expect(result).toEqual({ + expect(result).toStrictEqual({ ...INITIAL_NOTIFICATION_SETTINGS_STATE, routingPolicies: true, }); @@ -640,7 +640,7 @@ describe('CreateAlertV2 Context Utils', () => { const result = notificationSettingsReducer(modifiedState, { type: 'RESET', }); - expect(result).toEqual(INITIAL_NOTIFICATION_SETTINGS_STATE); + expect(result).toStrictEqual(INITIAL_NOTIFICATION_SETTINGS_STATE); }); it(TEST_SET_INITIAL_STATE_FROM_PAYLOAD, () => { @@ -662,7 +662,7 @@ describe('CreateAlertV2 Context Utils', () => { payload: newState, }, ); - expect(result).toEqual(newState); + expect(result).toStrictEqual(newState); }); it(TEST_RETURN_STATE_FOR_UNKNOWN_ACTION, () => { @@ -671,7 +671,7 @@ describe('CreateAlertV2 Context Utils', () => { { type: UNKNOWN_ACTION_TYPE } as any, ); - expect(result).toEqual(INITIAL_NOTIFICATION_SETTINGS_STATE); + expect(result).toStrictEqual(INITIAL_NOTIFICATION_SETTINGS_STATE); }); }); 
}); diff --git a/frontend/src/container/CustomDomainSettings/__tests__/CustomDomainSettings.test.tsx b/frontend/src/container/CustomDomainSettings/__tests__/CustomDomainSettings.test.tsx index 6808b589f9..c4e16fc8a0 100644 --- a/frontend/src/container/CustomDomainSettings/__tests__/CustomDomainSettings.test.tsx +++ b/frontend/src/container/CustomDomainSettings/__tests__/CustomDomainSettings.test.tsx @@ -113,7 +113,7 @@ describe('CustomDomainSettings', () => { await user.click(screen.getByRole('button', { name: /apply changes/i })); await waitFor(() => { - expect(capturedBody).toEqual({ name: 'myteam' }); + expect(capturedBody).toStrictEqual({ name: 'myteam' }); }); }); @@ -143,9 +143,9 @@ describe('CustomDomainSettings', () => { await user.type(input, 'myteam'); await user.click(screen.getByRole('button', { name: /apply changes/i })); - expect( - await screen.findByRole('button', { name: /contact support/i }), - ).toBeInTheDocument(); + await expect( + screen.findByRole('button', { name: /contact support/i }), + ).resolves.toBeInTheDocument(); }); it('shows validation error when subdomain is less than 3 characters', async () => { @@ -255,7 +255,7 @@ describe('CustomDomainSettings', () => { }, }); - expect(await screen.findByText('My Org Name')).toBeInTheDocument(); + await expect(screen.findByText('My Org Name')).resolves.toBeInTheDocument(); }); it('falls back to customDomainSubdomain when org displayName is missing', async () => { @@ -269,7 +269,7 @@ describe('CustomDomainSettings', () => { appContextOverrides: { org: [] }, }); - expect(await screen.findByText('custom-host')).toBeInTheDocument(); + await expect(screen.findByText('custom-host')).resolves.toBeInTheDocument(); }); it('falls back to activeHost.name when neither org name nor custom domain exists', async () => { @@ -294,7 +294,9 @@ describe('CustomDomainSettings', () => { }); // 'accepted-starfish' is the default host's name - expect(await screen.findByText('accepted-starfish')).toBeInTheDocument(); + 
await expect( + screen.findByText('accepted-starfish'), + ).resolves.toBeInTheDocument(); }); it('does not render the card name row if workspaceName is totally falsy', async () => { diff --git a/frontend/src/container/DashboardContainer/DashboardSettings/DashboardVariableSettings/VariableItem/VariableItem.test.tsx b/frontend/src/container/DashboardContainer/DashboardSettings/DashboardVariableSettings/VariableItem/VariableItem.test.tsx index a5efbc57c4..fd18c70a53 100644 --- a/frontend/src/container/DashboardContainer/DashboardSettings/DashboardVariableSettings/VariableItem/VariableItem.test.tsx +++ b/frontend/src/container/DashboardContainer/DashboardSettings/DashboardVariableSettings/VariableItem/VariableItem.test.tsx @@ -155,7 +155,7 @@ describe('VariableItem Component', () => { jest.clearAllMocks(); }); - test('renders without crashing', () => { + it('renders without crashing', () => { renderVariableItem(); expect(screen.getByText(TEXT.ALL_VARIABLES)).toBeInTheDocument(); @@ -165,7 +165,7 @@ describe('VariableItem Component', () => { }); describe('Variable Name Validation', () => { - test('shows error when variable name already exists', () => { + it('shows error when variable name already exists', () => { // Set validateName to return false (name exists) const mockValidateName = jest.fn().mockReturnValue(false); @@ -181,7 +181,7 @@ describe('VariableItem Component', () => { // We won't check for button disabled state as it might be inconsistent in tests }); - test('allows save when current variable name is used', () => { + it('allows save when current variable name is used', () => { // Mock validate to return false for all other names but true for own name const mockValidateName = jest .fn() @@ -197,7 +197,7 @@ describe('VariableItem Component', () => { expect(screen.queryByText(TEXT.VARIABLE_EXISTS)).not.toBeInTheDocument(); }); - test('shows error when variable name contains whitespace', () => { + it('shows error when variable name contains whitespace', () => 
{ renderVariableItem({ ...basicVariableData, name: '' }); // Enter a name with whitespace @@ -212,7 +212,7 @@ describe('VariableItem Component', () => { expect(saveButton).toBeDisabled(); }); - test('allows variable name without whitespace', () => { + it('allows variable name without whitespace', () => { renderVariableItem({ ...basicVariableData, name: '' }); // Enter a valid name without whitespace @@ -223,7 +223,7 @@ describe('VariableItem Component', () => { expect(screen.queryByText(TEXT.VARIABLE_WHITESPACE)).not.toBeInTheDocument(); }); - test('validates whitespace in auto-generated name for dynamic variables', () => { + it('validates whitespace in auto-generated name for dynamic variables', () => { // Create a dynamic variable with empty name const dynamicVariable: IDashboardVariable = { ...basicVariableData, @@ -241,7 +241,7 @@ describe('VariableItem Component', () => { }); describe('Dynamic Variable Attribute Key Validation', () => { - test('shows error when attribute key already exists', async () => { + it('shows error when attribute key already exists', async () => { // Mock validateAttributeKey to return false (attribute key exists) const mockValidateAttributeKey = jest.fn().mockReturnValue(false); @@ -277,7 +277,7 @@ describe('VariableItem Component', () => { expect(saveButton).toBeDisabled(); }); - test('allows saving when attribute key is unique', async () => { + it('allows saving when attribute key is unique', async () => { // Mock validateAttributeKey to return true (attribute key is unique) const mockValidateAttributeKey = jest.fn().mockReturnValue(true); @@ -315,7 +315,7 @@ describe('VariableItem Component', () => { expect(saveButton).not.toBeDisabled(); }); - test('allows same attribute key for current variable being edited', async () => { + it('allows same attribute key for current variable being edited', async () => { // Mock validateAttributeKey to return true for same variable const mockValidateAttributeKey = jest.fn().mockImplementation( 
(attributeKey, currentVariableId) => @@ -348,7 +348,7 @@ describe('VariableItem Component', () => { }); }); - test('does not validate attribute key for non-dynamic variables', async () => { + it('does not validate attribute key for non-dynamic variables', async () => { // Mock validateAttributeKey to return false (would show error for dynamic) const mockValidateAttributeKey = jest.fn().mockReturnValue(false); @@ -377,7 +377,7 @@ describe('VariableItem Component', () => { }); describe('Variable Type Switching', () => { - test('switches to CUSTOM variable type correctly', () => { + it('switches to CUSTOM variable type correctly', () => { renderVariableItem(); // Find the Query button @@ -401,7 +401,7 @@ describe('VariableItem Component', () => { expect(screen.getByText(TEXT.OPTIONS)).toBeInTheDocument(); }); - test('switches to TEXTBOX variable type correctly', () => { + it('switches to TEXTBOX variable type correctly', () => { renderVariableItem(); // Find and click Textbox button @@ -424,7 +424,7 @@ describe('VariableItem Component', () => { }); describe('MultiSelect and ALL Option', () => { - test('enables ALL option only when multiSelect is enabled', async () => { + it('enables ALL option only when multiSelect is enabled', async () => { renderVariableItem(); // Initially, ALL option should not be visible @@ -457,7 +457,7 @@ describe('VariableItem Component', () => { }); }); - test('disables ALL option when multiSelect is disabled', async () => { + it('disables ALL option when multiSelect is disabled', async () => { // Create variable with multiSelect and showALLOption both enabled const variable: IDashboardVariable = { ...basicVariableData, @@ -501,7 +501,7 @@ describe('VariableItem Component', () => { }); describe('Cancel and Navigation', () => { - test('calls onCancel when clicking All Variables button', () => { + it('calls onCancel when clicking All Variables button', () => { renderVariableItem(); // Click All variables button @@ -511,7 +511,7 @@ 
describe('VariableItem Component', () => { expect(onCancel).toHaveBeenCalledTimes(1); }); - test('calls onCancel when clicking Discard button', () => { + it('calls onCancel when clicking Discard button', () => { renderVariableItem(); // Click Discard button @@ -550,7 +550,7 @@ describe('VariableItem Component', () => { }; // Test for cyclic dependency detection - test('detects circular dependency and shows error message', async () => { + it('detects circular dependency and shows error message', async () => { // Create variables with circular dependency const variable1 = createVariable( TEST_VAR_IDS.VAR1, @@ -577,7 +577,7 @@ describe('VariableItem Component', () => { }); // Test for saving with no circular dependency - test('allows saving when no circular dependency exists', async () => { + it('allows saving when no circular dependency exists', async () => { // Create variables without circular dependency const variable1 = createVariable( TEST_VAR_IDS.VAR1, @@ -608,7 +608,7 @@ describe('VariableItem Component', () => { }); // Test with multiple variable formats in query - test('detects circular dependency with different variable formats', async () => { + it('detects circular dependency with different variable formats', async () => { // Create variables with circular dependency using different formats const variable1 = createVariable( TEST_VAR_IDS.VAR1, @@ -645,7 +645,7 @@ describe('VariableItem Component', () => { }); describe('Textbox Variable Default Value Handling', () => { - test('saves textbox variable with defaultValue and selectedValue set to textboxValue', async () => { + it('saves textbox variable with defaultValue and selectedValue set to textboxValue', async () => { const user = userEvent.setup(); const textboxVariable: IDashboardVariable = { id: TEST_VAR_IDS.VAR1, @@ -676,7 +676,7 @@ describe('VariableItem Component', () => { ); }); - test('saves textbox variable with empty values when textboxValue is empty', async () => { + it('saves textbox variable 
with empty values when textboxValue is empty', async () => { const user = userEvent.setup(); const textboxVariable: IDashboardVariable = { id: TEST_VAR_IDS.VAR1, @@ -707,7 +707,7 @@ describe('VariableItem Component', () => { ); }); - test('updates textbox defaultValue and selectedValue when user changes textboxValue input', async () => { + it('updates textbox defaultValue and selectedValue when user changes textboxValue input', async () => { const user = userEvent.setup(); const textboxVariable: IDashboardVariable = { id: TEST_VAR_IDS.VAR1, @@ -745,7 +745,7 @@ describe('VariableItem Component', () => { ); }); - test('non-textbox variables use variableDefaultValue instead of textboxValue', async () => { + it('non-textbox variables use variableDefaultValue instead of textboxValue', async () => { const user = userEvent.setup(); const queryVariable: IDashboardVariable = { id: TEST_VAR_IDS.VAR1, @@ -780,7 +780,7 @@ describe('VariableItem Component', () => { expect(savedVariable.defaultValue).not.toBe('should-not-be-used'); }); - test('switching to textbox type sets defaultValue and selectedValue correctly on save', async () => { + it('switching to textbox type sets defaultValue and selectedValue correctly on save', async () => { const user = userEvent.setup(); // Start with a QUERY variable const queryVariable: IDashboardVariable = { diff --git a/frontend/src/container/DashboardContainer/DashboardSettings/DashboardVariableSettings/index.tsx b/frontend/src/container/DashboardContainer/DashboardSettings/DashboardVariableSettings/index.tsx index 7c72dc3ba9..5830c5ae88 100644 --- a/frontend/src/container/DashboardContainer/DashboardSettings/DashboardVariableSettings/index.tsx +++ b/frontend/src/container/DashboardContainer/DashboardSettings/DashboardVariableSettings/index.tsx @@ -38,7 +38,7 @@ function TableRow({ children, ...props }: RowProps): JSX.Element { transition, isDragging, } = useSortable({ - // @ts-ignore + // @ts-expect-error id: props['data-row-key'], }); @@ 
-148,7 +148,7 @@ function VariablesSettings({ }); if (name) { - // @ts-ignore + // @ts-expect-error variableNamesMap[name] = name; } @@ -391,7 +391,7 @@ function VariablesSettings({ const variableName = updatedVariables[index].name; if (variableName) { - // @ts-ignore + // @ts-expect-error reArrangedVariables[variableName] = { ...updatedVariables[index], order: index, diff --git a/frontend/src/container/DashboardContainer/DashboardSettings/PublicDashboard/__tests__/PublicDashboard.test.tsx b/frontend/src/container/DashboardContainer/DashboardSettings/PublicDashboard/__tests__/PublicDashboard.test.tsx index cd48d06981..6d89632cb3 100644 --- a/frontend/src/container/DashboardContainer/DashboardSettings/PublicDashboard/__tests__/PublicDashboard.test.tsx +++ b/frontend/src/container/DashboardContainer/DashboardSettings/PublicDashboard/__tests__/PublicDashboard.test.tsx @@ -109,11 +109,13 @@ describe('PublicDashboardSetting', () => { ).toBeInTheDocument(); }); - expect( - await screen.findByRole('checkbox', { name: /enable time range/i }), - ).toBeInTheDocument(); + await expect( + screen.findByRole('checkbox', { name: /enable time range/i }), + ).resolves.toBeInTheDocument(); - expect(await screen.findByText(/default time range/i)).toBeInTheDocument(); + await expect( + screen.findByText(/default time range/i), + ).resolves.toBeInTheDocument(); expect(screen.getByText(/Last 30 minutes/i)).toBeInTheDocument(); @@ -123,9 +125,9 @@ describe('PublicDashboardSetting', () => { ).toBeInTheDocument(); }); - expect( - await screen.findByRole('button', { name: /publish dashboard/i }), - ).toBeInTheDocument(); + await expect( + screen.findByRole('button', { name: /publish dashboard/i }), + ).resolves.toBeInTheDocument(); }); }); @@ -149,9 +151,9 @@ describe('PublicDashboardSetting', () => { ).toBeInTheDocument(); }); - expect( - await screen.findByRole('checkbox', { name: /enable time range/i }), - ).toBeChecked(); + await expect( + screen.findByRole('checkbox', { name: /enable 
time range/i }), + ).resolves.toBeChecked(); await waitFor(() => { expect(screen.getByText(/default time range/i)).toBeInTheDocument(); @@ -163,13 +165,13 @@ describe('PublicDashboardSetting', () => { expect(screen.getByText(/Public Dashboard URL/i)).toBeInTheDocument(); }); - expect( - await screen.findByRole('button', { name: /update published dashboard/i }), - ).toBeInTheDocument(); + await expect( + screen.findByRole('button', { name: /update published dashboard/i }), + ).resolves.toBeInTheDocument(); - expect( - await screen.findByRole('button', { name: /unpublish dashboard/i }), - ).toBeInTheDocument(); + await expect( + screen.findByRole('button', { name: /unpublish dashboard/i }), + ).resolves.toBeInTheDocument(); }); }); @@ -249,7 +251,7 @@ describe('PublicDashboardSetting', () => { rest.post(publicDashboardURL, async (req, res, ctx) => { const body = await req.json(); createApiCalled = true; - expect(body).toEqual({ + expect(body).toStrictEqual({ timeRangeEnabled: true, defaultTimeRange: DEFAULT_TIME_RANGE, }); @@ -318,7 +320,7 @@ describe('PublicDashboardSetting', () => { await waitFor(() => { expect(updateApiCalled).toBe(true); - expect(capturedRequestBody).toEqual({ + expect(capturedRequestBody).toStrictEqual({ timeRangeEnabled: true, defaultTimeRange: DEFAULT_TIME_RANGE, }); diff --git a/frontend/src/container/DashboardContainer/DashboardVariablesSelection/VariableItem.test.tsx b/frontend/src/container/DashboardContainer/DashboardVariablesSelection/VariableItem.test.tsx index 3d6ac415df..633eae5b94 100644 --- a/frontend/src/container/DashboardContainer/DashboardVariablesSelection/VariableItem.test.tsx +++ b/frontend/src/container/DashboardContainer/DashboardVariablesSelection/VariableItem.test.tsx @@ -40,7 +40,7 @@ describe('VariableItem', () => { useEffectSpy.mockRestore(); }); - test('renders component with default props', () => { + it('renders component with default props', () => { render( { 
expect(screen.getByText('$testVariable')).toBeInTheDocument(); }); - test('renders Input when the variable type is TEXTBOX', () => { + it('renders Input when the variable type is TEXTBOX', () => { render( { ).toBeInTheDocument(); }); - test('calls onValueUpdate when Input value changes and blurs', async () => { + it('calls onValueUpdate when Input value changes and blurs', async () => { render( { }); }); - test('renders a Select element when variable type is CUSTOM', () => { + it('renders a Select element when variable type is CUSTOM', () => { render( { expect(screen.getByTestId('variable-select')).toBeInTheDocument(); }); - test('renders a Select element with all selected', async () => { + it('renders a Select element with all selected', async () => { const customVariableData = { ...mockCustomVariableData, allSelected: true, @@ -138,7 +138,7 @@ describe('VariableItem', () => { expect(screen.getByText('ALL')).toBeInTheDocument(); }); - test('calls useEffect when the component mounts', () => { + it('calls useEffect when the component mounts', () => { render( { ); }); - expect(callOrder).toEqual([ + expect(callOrder).toStrictEqual([ 'updateDashboardVariablesStore', 'enqueueDescendantsOfVariable', ]); diff --git a/frontend/src/container/DashboardContainer/DashboardVariablesSelection/__test__/VariableItem.defaulting.behavior.test.tsx b/frontend/src/container/DashboardContainer/DashboardVariablesSelection/__test__/VariableItem.defaulting.behavior.test.tsx index ba8c6ec062..9adb364279 100644 --- a/frontend/src/container/DashboardContainer/DashboardVariablesSelection/__test__/VariableItem.defaulting.behavior.test.tsx +++ b/frontend/src/container/DashboardContainer/DashboardVariablesSelection/__test__/VariableItem.defaulting.behavior.test.tsx @@ -32,7 +32,7 @@ describe('VariableItem Default Value Selection Behavior', () => { }); describe('Single Select Variables', () => { - test('should keep previous selection value', async () => { + it('should keep previous selection 
value', async () => { const variable: IDashboardVariable = { id: TEST_VARIABLE_ID, name: TEST_VARIABLE_NAME, @@ -51,10 +51,10 @@ describe('VariableItem Default Value Selection Behavior', () => { expect(screen.getByTestId(VARIABLE_SELECT_TESTID)).toBeInTheDocument(); }); - expect(await screen.findByText('option1')).toBeInTheDocument(); + await expect(screen.findByText('option1')).resolves.toBeInTheDocument(); }); - test('should auto-select first option when no previous and no default', async () => { + it('should auto-select first option when no previous and no default', async () => { const variable: IDashboardVariable = { id: TEST_VARIABLE_ID, name: TEST_VARIABLE_NAME, @@ -74,12 +74,12 @@ describe('VariableItem Default Value Selection Behavior', () => { }); // With the new variable select strategy, the first option is auto-selected - expect(await screen.findByText('option1')).toBeInTheDocument(); + await expect(screen.findByText('option1')).resolves.toBeInTheDocument(); }); }); describe('Multi Select Variables with ALL enabled', () => { - test('should show ALL when all options are selected', async () => { + it('should show ALL when all options are selected', async () => { const variable: IDashboardVariable = { id: TEST_VARIABLE_ID, name: TEST_VARIABLE_NAME, @@ -99,12 +99,12 @@ describe('VariableItem Default Value Selection Behavior', () => { expect(screen.getByTestId(VARIABLE_SELECT_TESTID)).toBeInTheDocument(); }); - expect(await screen.findByText('ALL')).toBeInTheDocument(); + await expect(screen.findByText('ALL')).resolves.toBeInTheDocument(); }); }); describe('Multi Select Variables with ALL disabled', () => { - test('should show placeholder when no selection', async () => { + it('should show placeholder when no selection', async () => { const variable: IDashboardVariable = { id: TEST_VARIABLE_ID, name: TEST_VARIABLE_NAME, @@ -123,7 +123,7 @@ describe('VariableItem Default Value Selection Behavior', () => { 
expect(screen.getByTestId(VARIABLE_SELECT_TESTID)).toBeInTheDocument(); }); - expect(await screen.findByText('Select value')).toBeInTheDocument(); + await expect(screen.findByText('Select value')).resolves.toBeInTheDocument(); }); }); }); diff --git a/frontend/src/container/DashboardContainer/DashboardVariablesSelection/__test__/dashboardVariables.test.tsx b/frontend/src/container/DashboardContainer/DashboardVariablesSelection/__test__/dashboardVariables.test.tsx index 904f6b0cc3..c84f362b14 100644 --- a/frontend/src/container/DashboardContainer/DashboardVariablesSelection/__test__/dashboardVariables.test.tsx +++ b/frontend/src/container/DashboardContainer/DashboardVariablesSelection/__test__/dashboardVariables.test.tsx @@ -47,7 +47,7 @@ describe('dashboardVariables - utilities and processors', () => { }, ]; - test.each(testCases)( + it.each(testCases)( 'should update variable node when $scenario', ({ nodeToUpdate, expected }) => { const updatedVariables: string[] = []; @@ -57,7 +57,7 @@ describe('dashboardVariables - utilities and processors', () => { onUpdateVariableNode(nodeToUpdate, graph, topologicalOrder, callback); - expect(updatedVariables).toEqual(expected); + expect(updatedVariables).toStrictEqual(expected); }, ); @@ -66,7 +66,7 @@ describe('dashboardVariables - utilities and processors', () => { onUpdateVariableNode('http_status_code', graph, [], (node) => updatedVariables.push(node), ); - expect(updatedVariables).toEqual([]); + expect(updatedVariables).toStrictEqual([]); }); }); @@ -87,11 +87,11 @@ describe('dashboardVariables - utilities and processors', () => { environment: [], }; - expect(buildParentDependencyGraph(graph)).toEqual(expected); + expect(buildParentDependencyGraph(graph)).toStrictEqual(expected); }); it('should handle empty graph', () => { - expect(buildParentDependencyGraph({})).toEqual({}); + expect(buildParentDependencyGraph({})).toStrictEqual({}); }); }); @@ -142,13 +142,13 @@ describe('dashboardVariables - utilities and processors', 
() => { }, }; - expect(buildDependencyGraph(graph)).toEqual(expected); + expect(buildDependencyGraph(graph)).toStrictEqual(expected); }); it('should return empty transitiveDescendants for an empty graph', () => { const result = buildDependencyGraph({}); - expect(result.transitiveDescendants).toEqual({}); - expect(result.order).toEqual([]); + expect(result.transitiveDescendants).toStrictEqual({}); + expect(result.order).toStrictEqual([]); expect(result.hasCycle).toBe(false); }); @@ -159,7 +159,7 @@ describe('dashboardVariables - utilities and processors', () => { c: [], }; const result = buildDependencyGraph(linearGraph); - expect(result.transitiveDescendants).toEqual({ + expect(result.transitiveDescendants).toStrictEqual({ a: ['b', 'c'], b: ['c'], c: [], @@ -174,13 +174,13 @@ describe('dashboardVariables - utilities and processors', () => { d: [], }; const result = buildDependencyGraph(diamondGraph); - expect(result.transitiveDescendants.a).toEqual( + expect(result.transitiveDescendants.a).toStrictEqual( expect.arrayContaining(['b', 'c', 'd']), ); expect(result.transitiveDescendants.a).toHaveLength(3); - expect(result.transitiveDescendants.b).toEqual(['d']); - expect(result.transitiveDescendants.c).toEqual(['d']); - expect(result.transitiveDescendants.d).toEqual([]); + expect(result.transitiveDescendants.b).toStrictEqual(['d']); + expect(result.transitiveDescendants.c).toStrictEqual(['d']); + expect(result.transitiveDescendants.d).toStrictEqual([]); }); it('should handle disconnected components in transitiveDescendants', () => { @@ -191,10 +191,10 @@ describe('dashboardVariables - utilities and processors', () => { y: [], }; const result = buildDependencyGraph(disconnectedGraph); - expect(result.transitiveDescendants.a).toEqual(['b']); - expect(result.transitiveDescendants.b).toEqual([]); - expect(result.transitiveDescendants.x).toEqual(['y']); - expect(result.transitiveDescendants.y).toEqual([]); + expect(result.transitiveDescendants.a).toStrictEqual(['b']); + 
expect(result.transitiveDescendants.b).toStrictEqual([]); + expect(result.transitiveDescendants.x).toStrictEqual(['y']); + expect(result.transitiveDescendants.y).toStrictEqual([]); }); it('should return empty transitiveDescendants for all leaf nodes', () => { @@ -204,7 +204,7 @@ describe('dashboardVariables - utilities and processors', () => { c: [], }; const result = buildDependencyGraph(leafOnlyGraph); - expect(result.transitiveDescendants).toEqual({ + expect(result.transitiveDescendants).toStrictEqual({ a: [], b: [], c: [], @@ -225,11 +225,11 @@ describe('dashboardVariables - utilities and processors', () => { environment: [], }; - expect(buildDependencies(variables)).toEqual(expected); + expect(buildDependencies(variables)).toStrictEqual(expected); }); it('should handle empty variables array', () => { - expect(buildDependencies([])).toEqual({}); + expect(buildDependencies([])).toStrictEqual({}); }); }); }); diff --git a/frontend/src/container/DashboardContainer/DashboardVariablesSelection/util.ts b/frontend/src/container/DashboardContainer/DashboardVariablesSelection/util.ts index 695e0727c6..dbe7e5d213 100644 --- a/frontend/src/container/DashboardContainer/DashboardVariablesSelection/util.ts +++ b/frontend/src/container/DashboardContainer/DashboardVariablesSelection/util.ts @@ -36,7 +36,7 @@ export const convertVariablesToDbFormat = ( variblesArr.reduce((result, obj: IDashboardVariable) => { const { id } = obj; - // @ts-ignore + // @ts-expect-error result[id] = obj; return result; }, {}); diff --git a/frontend/src/container/DashboardContainer/DashboardVariablesSelection/utils.test.ts b/frontend/src/container/DashboardContainer/DashboardVariablesSelection/utils.test.ts index 9b82d3e23a..b2158f273d 100644 --- a/frontend/src/container/DashboardContainer/DashboardVariablesSelection/utils.test.ts +++ b/frontend/src/container/DashboardContainer/DashboardVariablesSelection/utils.test.ts @@ -81,7 +81,7 @@ describe('onUpdateVariableNode', () => { 
onUpdateVariableNode('deployment', graph, topologicalOrder, callback); - expect(visited).toEqual(['deployment', 'namespace', 'service', 'pod']); + expect(visited).toStrictEqual(['deployment', 'namespace', 'service', 'pod']); }); it('should call callback starting from a middle node', () => { @@ -92,7 +92,7 @@ describe('onUpdateVariableNode', () => { onUpdateVariableNode('namespace', graph, topologicalOrder, callback); - expect(visited).toEqual(['namespace', 'service', 'pod']); + expect(visited).toStrictEqual(['namespace', 'service', 'pod']); }); it('should only call callback for the leaf node when updating leaf', () => { @@ -103,7 +103,7 @@ describe('onUpdateVariableNode', () => { onUpdateVariableNode('pod', graph, topologicalOrder, callback); - expect(visited).toEqual(['pod']); + expect(visited).toStrictEqual(['pod']); }); it('should handle CUSTOM variable not in topologicalOrder by updating its children', () => { @@ -116,7 +116,7 @@ describe('onUpdateVariableNode', () => { onUpdateVariableNode('customVar', graph, topologicalOrder, callback); // Should process namespace and its descendants (service, pod) - expect(visited).toEqual(['namespace', 'service', 'pod']); + expect(visited).toStrictEqual(['namespace', 'service', 'pod']); }); it('should handle node not in graph gracefully', () => { @@ -128,7 +128,7 @@ describe('onUpdateVariableNode', () => { onUpdateVariableNode('unknownNode', graph, topologicalOrder, callback); // Should not call callback for any node since unknownNode has no children - expect(visited).toEqual([]); + expect(visited).toStrictEqual([]); }); it('should handle empty graph', () => { @@ -140,7 +140,7 @@ describe('onUpdateVariableNode', () => { onUpdateVariableNode('deployment', {}, topologicalOrder, callback); // deployment is in topologicalOrder, so callback is called for it - expect(visited).toEqual(['deployment']); + expect(visited).toStrictEqual(['deployment']); }); it('should handle empty topologicalOrder', () => { @@ -151,7 +151,7 @@ 
describe('onUpdateVariableNode', () => { onUpdateVariableNode('deployment', graph, [], callback); - expect(visited).toEqual([]); + expect(visited).toStrictEqual([]); }); it('should handle CUSTOM variable with multiple children', () => { @@ -173,7 +173,7 @@ describe('onUpdateVariableNode', () => { ); // Should process namespace, service, and pod (descendants) - expect(visited).toEqual(['namespace', 'service', 'pod']); + expect(visited).toStrictEqual(['namespace', 'service', 'pod']); }); }); @@ -200,11 +200,15 @@ function makeDynamicVar( describe('mergeUniqueStrings', () => { it('should merge two arrays and deduplicate', () => { - expect(mergeUniqueStrings(['a', 'b'], ['b', 'c'])).toEqual(['a', 'b', 'c']); + expect(mergeUniqueStrings(['a', 'b'], ['b', 'c'])).toStrictEqual([ + 'a', + 'b', + 'c', + ]); }); it('should convert numbers and booleans to strings', () => { - expect(mergeUniqueStrings([1, true, 'hello'], [2, false])).toEqual([ + expect(mergeUniqueStrings([1, true, 'hello'], [2, false])).toStrictEqual([ '1', 'true', 'hello', @@ -214,15 +218,15 @@ describe('mergeUniqueStrings', () => { }); it('should deduplicate when number and its string form both appear', () => { - expect(mergeUniqueStrings([42], ['42'])).toEqual(['42']); + expect(mergeUniqueStrings([42], ['42'])).toStrictEqual(['42']); }); it('should handle a single array', () => { - expect(mergeUniqueStrings(['x', 'y', 'x'])).toEqual(['x', 'y']); + expect(mergeUniqueStrings(['x', 'y', 'x'])).toStrictEqual(['x', 'y']); }); it('should handle three or more arrays', () => { - expect(mergeUniqueStrings(['a'], ['b'], ['c'], ['a', 'c'])).toEqual([ + expect(mergeUniqueStrings(['a'], ['b'], ['c'], ['a', 'c'])).toStrictEqual([ 'a', 'b', 'c', @@ -230,15 +234,19 @@ describe('mergeUniqueStrings', () => { }); it('should return empty array when no arrays are provided', () => { - expect(mergeUniqueStrings()).toEqual([]); + expect(mergeUniqueStrings()).toStrictEqual([]); }); it('should return empty array when all input 
arrays are empty', () => { - expect(mergeUniqueStrings([], [], [])).toEqual([]); + expect(mergeUniqueStrings([], [], [])).toStrictEqual([]); }); it('should preserve order of first occurrence', () => { - expect(mergeUniqueStrings(['c', 'a'], ['b', 'a'])).toEqual(['c', 'a', 'b']); + expect(mergeUniqueStrings(['c', 'a'], ['b', 'a'])).toStrictEqual([ + 'c', + 'a', + 'b', + ]); }); }); diff --git a/frontend/src/container/DashboardContainer/__test__/PanelManagement.test.tsx b/frontend/src/container/DashboardContainer/__test__/PanelManagement.test.tsx index 84d74b40f7..df04ca5c34 100644 --- a/frontend/src/container/DashboardContainer/__test__/PanelManagement.test.tsx +++ b/frontend/src/container/DashboardContainer/__test__/PanelManagement.test.tsx @@ -424,7 +424,7 @@ describe('Panel Management Tests', () => { ); // Should return dashboard unchanged - expect(updatedDashboard).toEqual(dashboard); + expect(updatedDashboard).toStrictEqual(dashboard); }); it('should handle undefined dashboard gracefully', () => { diff --git a/frontend/src/container/DashboardContainer/visualization/charts/utils/__tests__/stackUtils.test.ts b/frontend/src/container/DashboardContainer/visualization/charts/utils/__tests__/stackUtils.test.ts index d9e049652a..5406afa9a3 100644 --- a/frontend/src/container/DashboardContainer/visualization/charts/utils/__tests__/stackUtils.test.ts +++ b/frontend/src/container/DashboardContainer/visualization/charts/utils/__tests__/stackUtils.test.ts @@ -13,7 +13,7 @@ describe('stackUtils', () => { [4, 5, 6], ]; const { data: result } = stack(data, neverOmit); - expect(result[0]).toEqual([100, 200, 300]); + expect(result[0]).toStrictEqual([100, 200, 300]); }); it('stacks value series cumulatively (last = raw, first = total)', () => { @@ -26,9 +26,9 @@ describe('stackUtils', () => { ]; const { data: result } = stack(data, neverOmit); // result[1] = s1+s2+s3, result[2] = s2+s3, result[3] = s3 - expect(result[1]).toEqual([12, 15, 18]); // 1+4+7, 2+5+8, 3+6+9 - 
expect(result[2]).toEqual([11, 13, 15]); // 4+7, 5+8, 6+9 - expect(result[3]).toEqual([7, 8, 9]); + expect(result[1]).toStrictEqual([12, 15, 18]); // 1+4+7, 2+5+8, 3+6+9 + expect(result[2]).toStrictEqual([11, 13, 15]); // 4+7, 5+8, 6+9 + expect(result[3]).toStrictEqual([7, 8, 9]); }); it('treats null values as 0 when stacking', () => { @@ -38,8 +38,8 @@ describe('stackUtils', () => { [null, 10], ]; const { data: result } = stack(data, neverOmit); - expect(result[1]).toEqual([1, 10]); // total - expect(result[2]).toEqual([0, 10]); // last series with null→0 + expect(result[1]).toStrictEqual([1, 10]); // total + expect(result[2]).toStrictEqual([0, 10]); // last series with null→0 }); it('copies omitted series as-is without accumulating', () => { @@ -53,9 +53,9 @@ describe('stackUtils', () => { const omitSeries2 = (i: number): boolean => i === 2; const { data: result } = stack(data, omitSeries2); // series 3 raw: [1, 2]; series 2 omitted: [100, 200] as-is; series 1 stacked with s3: [11, 22] - expect(result[1]).toEqual([11, 22]); // 10+1, 20+2 - expect(result[2]).toEqual([100, 200]); // copied, not stacked - expect(result[3]).toEqual([1, 2]); + expect(result[1]).toStrictEqual([11, 22]); // 10+1, 20+2 + expect(result[2]).toStrictEqual([100, 200]); // copied, not stacked + expect(result[3]).toStrictEqual([1, 2]); }); it('returns bands between consecutive visible series when none omitted', () => { @@ -66,7 +66,7 @@ describe('stackUtils', () => { [5, 6], ]; const { bands } = stack(data, neverOmit); - expect(bands).toEqual([{ series: [1, 2] }, { series: [2, 3] }]); + expect(bands).toStrictEqual([{ series: [1, 2] }, { series: [2, 3] }]); }); it('returns bands only between visible series when some are omitted', () => { @@ -74,7 +74,7 @@ describe('stackUtils', () => { const data: AlignedData = [[0], [1], [2], [3], [4]]; const omitSeries2 = (i: number): boolean => i === 2; const { bands } = stack(data, omitSeries2); - expect(bands).toEqual([{ series: [1, 3] }, { series: [3, 4] 
}]); + expect(bands).toStrictEqual([{ series: [1, 3] }, { series: [3, 4] }]); }); it('returns empty bands when only one value series', () => { @@ -83,30 +83,30 @@ describe('stackUtils', () => { [1, 2], ]; const { bands } = stack(data, neverOmit); - expect(bands).toEqual([]); + expect(bands).toStrictEqual([]); }); }); describe('getInitialStackedBands', () => { it('returns one band between each consecutive pair for seriesCount 3', () => { - expect(getInitialStackedBands(3)).toEqual([ + expect(getInitialStackedBands(3)).toStrictEqual([ { series: [1, 2] }, { series: [2, 3] }, ]); }); it('returns empty array for seriesCount 0 or 1', () => { - expect(getInitialStackedBands(0)).toEqual([]); - expect(getInitialStackedBands(1)).toEqual([]); + expect(getInitialStackedBands(0)).toStrictEqual([]); + expect(getInitialStackedBands(1)).toStrictEqual([]); }); it('returns single band for seriesCount 2', () => { - expect(getInitialStackedBands(2)).toEqual([{ series: [1, 2] }]); + expect(getInitialStackedBands(2)).toStrictEqual([{ series: [1, 2] }]); }); it('returns bands [1,2], [2,3], ..., [n-1, n] for seriesCount n', () => { const bands = getInitialStackedBands(5); - expect(bands).toEqual([ + expect(bands).toStrictEqual([ { series: [1, 2] }, { series: [2, 3] }, { series: [3, 4] }, diff --git a/frontend/src/container/DashboardContainer/visualization/hooks/__tests__/useBarChartStacking.test.ts b/frontend/src/container/DashboardContainer/visualization/hooks/__tests__/useBarChartStacking.test.ts index 3ea51be4a3..024c468f2d 100644 --- a/frontend/src/container/DashboardContainer/visualization/hooks/__tests__/useBarChartStacking.test.ts +++ b/frontend/src/container/DashboardContainer/visualization/hooks/__tests__/useBarChartStacking.test.ts @@ -119,9 +119,9 @@ describe('useBarChartStacking', () => { }), ); // Still returns stacked data (computed in useMemo); no hooks registered - expect(result.current[0]).toEqual([0, 1]); - expect(result.current[1]).toEqual([5, 7]); // stacked - 
expect(result.current[2]).toEqual([4, 5]); + expect(result.current[0]).toStrictEqual([0, 1]); + expect(result.current[1]).toStrictEqual([5, 7]); // stacked + expect(result.current[2]).toStrictEqual([4, 5]); }); it('returns stacked data when isStackedBarChart is true and multiple value series', () => { @@ -138,10 +138,10 @@ describe('useBarChartStacking', () => { config: null, }), ); - expect(result.current[0]).toEqual([0, 1, 2]); - expect(result.current[1]).toEqual([12, 15, 18]); // s1+s2+s3 - expect(result.current[2]).toEqual([11, 13, 15]); // s2+s3 - expect(result.current[3]).toEqual([7, 8, 9]); + expect(result.current[0]).toStrictEqual([0, 1, 2]); + expect(result.current[1]).toStrictEqual([12, 15, 18]); // s1+s2+s3 + expect(result.current[2]).toStrictEqual([11, 13, 15]); // s2+s3 + expect(result.current[3]).toStrictEqual([7, 8, 9]); }); it('returns data as-is when only one value series (no stacking needed)', () => { @@ -156,7 +156,7 @@ describe('useBarChartStacking', () => { config: null, }), ); - expect(result.current).toEqual(data); + expect(result.current).toStrictEqual(data); }); it('registers setData and setSeries hooks when isStackedBarChart and config provided', () => { diff --git a/frontend/src/container/DashboardContainer/visualization/panels/TimeSeriesPanel/__tests__/utils.test.ts b/frontend/src/container/DashboardContainer/visualization/panels/TimeSeriesPanel/__tests__/utils.test.ts index 4131c24d48..66ae5533f6 100644 --- a/frontend/src/container/DashboardContainer/visualization/panels/TimeSeriesPanel/__tests__/utils.test.ts +++ b/frontend/src/container/DashboardContainer/visualization/panels/TimeSeriesPanel/__tests__/utils.test.ts @@ -83,7 +83,7 @@ describe('TimeSeriesPanel utils', () => { const data = prepareChartData(apiResponse); expect(data).toHaveLength(1); - expect(data[0]).toEqual([]); + expect(data[0]).toStrictEqual([]); }); it('returns timestamps and one series of y values for single series', () => { @@ -102,8 +102,8 @@ 
describe('TimeSeriesPanel utils', () => { const data = prepareChartData(apiResponse); expect(data).toHaveLength(2); - expect(data[0]).toEqual([1000, 2000]); - expect(data[1]).toEqual([10, 20]); + expect(data[0]).toStrictEqual([1000, 2000]); + expect(data[1]).toStrictEqual([10, 20]); }); it('merges timestamps and fills missing values with null for multiple series', () => { @@ -128,11 +128,11 @@ describe('TimeSeriesPanel utils', () => { const data = prepareChartData(apiResponse); - expect(data[0]).toEqual([1000, 2000, 3000]); + expect(data[0]).toStrictEqual([1000, 2000, 3000]); // First series: 1, null, 3 - expect(data[1]).toEqual([1, null, 3]); + expect(data[1]).toStrictEqual([1, null, 3]); // Second series: 10, 20, null - expect(data[2]).toEqual([10, 20, null]); + expect(data[2]).toStrictEqual([10, 20, null]); }); }); diff --git a/frontend/src/container/DashboardContainer/visualization/panels/utils/__tests__/legendVisibilityUtils.test.ts b/frontend/src/container/DashboardContainer/visualization/panels/utils/__tests__/legendVisibilityUtils.test.ts index 2af056126a..9e209b4a21 100644 --- a/frontend/src/container/DashboardContainer/visualization/panels/utils/__tests__/legendVisibilityUtils.test.ts +++ b/frontend/src/container/DashboardContainer/visualization/panels/utils/__tests__/legendVisibilityUtils.test.ts @@ -62,7 +62,7 @@ describe('legendVisibilityUtils', () => { const result = getStoredSeriesVisibility('widget-1'); expect(result).not.toBeNull(); - expect(result).toEqual([ + expect(result).toStrictEqual([ { label: 'CPU', show: true }, { label: 'Memory', show: false }, ]); @@ -85,7 +85,7 @@ describe('legendVisibilityUtils', () => { const result = getStoredSeriesVisibility('widget-1'); expect(result).not.toBeNull(); - expect(result).toEqual([ + expect(result).toStrictEqual([ { label: 'CPU', show: true }, { label: 'CPU', show: false }, { label: 'Memory', show: false }, @@ -128,7 +128,7 @@ describe('legendVisibilityUtils', () => { const stored = 
getStoredSeriesVisibility('widget-1'); expect(stored).not.toBeNull(); - expect(stored).toEqual([ + expect(stored).toStrictEqual([ { label: 'CPU', show: true }, { label: 'Memory', show: false }, ]); @@ -150,7 +150,7 @@ describe('legendVisibilityUtils', () => { const stored = getStoredSeriesVisibility('widget-new'); expect(stored).not.toBeNull(); - expect(stored).toEqual([{ label: 'CPU', show: false }]); + expect(stored).toStrictEqual([{ label: 'CPU', show: false }]); }); it('updates existing widget visibility when entry already exists', () => { @@ -176,7 +176,7 @@ describe('legendVisibilityUtils', () => { const stored = getStoredSeriesVisibility('widget-1'); expect(stored).not.toBeNull(); - expect(stored).toEqual([ + expect(stored).toStrictEqual([ { label: 'CPU', show: false }, { label: 'Memory', show: true }, ]); @@ -202,7 +202,7 @@ describe('legendVisibilityUtils', () => { const stored = getStoredSeriesVisibility('widget-1'); expect(stored).not.toBeNull(); - expect(stored).toEqual([ + expect(stored).toStrictEqual([ { label: 'x-axis', show: true }, { label: 'CPU', show: false }, ]); @@ -232,10 +232,10 @@ describe('legendVisibilityUtils', () => { { label: 'B', show: true }, ]); - expect(getStoredSeriesVisibility('widget-a')).toEqual([ + expect(getStoredSeriesVisibility('widget-a')).toStrictEqual([ { label: 'A', show: true }, ]); - expect(getStoredSeriesVisibility('widget-b')).toEqual([ + expect(getStoredSeriesVisibility('widget-b')).toStrictEqual([ { label: 'B', show: true }, ]); }); @@ -252,7 +252,7 @@ describe('legendVisibilityUtils', () => { ); const [_, value] = (localStorage.setItem as jest.Mock).mock.calls[0]; expect((): void => JSON.parse(value)).not.toThrow(); - expect(JSON.parse(value)).toEqual([ + expect(JSON.parse(value)).toStrictEqual([ { name: 'widget-1', dataIndex: [{ label: 'CPU', show: true }] }, ]); }); @@ -263,7 +263,7 @@ describe('legendVisibilityUtils', () => { const raw = localStorage.getItem(storageKey); expect(raw).not.toBeNull(); const parsed 
= JSON.parse(raw ?? '[]'); - expect(parsed).toEqual([{ name: 'widget-1', dataIndex: [] }]); + expect(parsed).toStrictEqual([{ name: 'widget-1', dataIndex: [] }]); expect(getStoredSeriesVisibility('widget-1')).toBeNull(); }); }); diff --git a/frontend/src/container/ForgotPassword/__tests__/ForgotPassword.test.tsx b/frontend/src/container/ForgotPassword/__tests__/ForgotPassword.test.tsx index d37827729a..ed25b1dd51 100644 --- a/frontend/src/container/ForgotPassword/__tests__/ForgotPassword.test.tsx +++ b/frontend/src/container/ForgotPassword/__tests__/ForgotPassword.test.tsx @@ -188,7 +188,9 @@ describe('ForgotPassword Component', () => { const submitButton = screen.getByTestId('forgot-password-submit'); await user.click(submitButton); - expect(await screen.findByText(/check your email/i)).toBeInTheDocument(); + await expect( + screen.findByText(/check your email/i), + ).resolves.toBeInTheDocument(); expect( screen.getByText(/we've sent a password reset link/i), ).toBeInTheDocument(); @@ -208,7 +210,9 @@ describe('ForgotPassword Component', () => { const submitButton = screen.getByTestId('forgot-password-submit'); await user.click(submitButton); - expect(await screen.findByTestId('back-to-login')).toBeInTheDocument(); + await expect( + screen.findByTestId('back-to-login'), + ).resolves.toBeInTheDocument(); }); it('redirects to login when clicking back to login on success screen', async () => { @@ -225,7 +229,9 @@ describe('ForgotPassword Component', () => { const submitButton = screen.getByTestId('forgot-password-submit'); await user.click(submitButton); - expect(await screen.findByTestId('back-to-login')).toBeInTheDocument(); + await expect( + screen.findByTestId('back-to-login'), + ).resolves.toBeInTheDocument(); const backToLoginButton = screen.getByTestId('back-to-login'); await user.click(backToLoginButton); @@ -250,7 +256,9 @@ describe('ForgotPassword Component', () => { const submitButton = screen.getByTestId('forgot-password-submit'); await 
user.click(submitButton); - expect(await screen.findByText(/user not found/i)).toBeInTheDocument(); + await expect( + screen.findByText(/user not found/i), + ).resolves.toBeInTheDocument(); }); it('displays error message when API returns server error', async () => { @@ -263,9 +271,9 @@ describe('ForgotPassword Component', () => { const submitButton = screen.getByTestId('forgot-password-submit'); await user.click(submitButton); - expect( - await screen.findByText(/internal server error occurred/i), - ).toBeInTheDocument(); + await expect( + screen.findByText(/internal server error occurred/i), + ).resolves.toBeInTheDocument(); }); it('clears error message on new submission attempt', async () => { @@ -295,7 +303,9 @@ describe('ForgotPassword Component', () => { const submitButton = screen.getByTestId('forgot-password-submit'); await user.click(submitButton); - expect(await screen.findByText(/user not found/i)).toBeInTheDocument(); + await expect( + screen.findByText(/user not found/i), + ).resolves.toBeInTheDocument(); // Click submit again await user.click(submitButton); @@ -303,7 +313,9 @@ describe('ForgotPassword Component', () => { await waitFor(() => { expect(screen.queryByText(/user not found/i)).not.toBeInTheDocument(); }); - expect(await screen.findByText(/check your email/i)).toBeInTheDocument(); + await expect( + screen.findByText(/check your email/i), + ).resolves.toBeInTheDocument(); }); }); @@ -336,7 +348,9 @@ describe('ForgotPassword Component', () => { await user.click(submitButton); // Button should show loading state - expect(await screen.findByText(/sending\.\.\./i)).toBeInTheDocument(); + await expect( + screen.findByText(/sending\.\.\./i), + ).resolves.toBeInTheDocument(); }); it('disables submit button during loading', async () => { diff --git a/frontend/src/container/FormAlertRules/__tests__/usePrefillAlertConditions.test.ts b/frontend/src/container/FormAlertRules/__tests__/usePrefillAlertConditions.test.ts index 6dbdcea6d9..073771074f 100644 
--- a/frontend/src/container/FormAlertRules/__tests__/usePrefillAlertConditions.test.ts +++ b/frontend/src/container/FormAlertRules/__tests__/usePrefillAlertConditions.test.ts @@ -76,7 +76,7 @@ describe('usePrefillAlertConditions', () => { builder: { queryData: [{ reduceTo: 'p90' }] }, } as any), ); - expect(result.current.matchType).toBe(null); + expect(result.current.matchType).toBeNull(); }); it('returns the correct matchType for multiple queries with same time aggregation', () => { @@ -112,7 +112,7 @@ describe('usePrefillAlertConditions', () => { }, } as any), ); - expect(result.current.matchType).toBe(null); + expect(result.current.matchType).toBeNull(); }); it('returns the correct op, target, targetUnit from the higher priority threshold for multiple thresholds', () => { diff --git a/frontend/src/container/FormAlertRules/utils.test.ts b/frontend/src/container/FormAlertRules/utils.test.ts index 49acf94bc1..a8b0ffb68e 100644 --- a/frontend/src/container/FormAlertRules/utils.test.ts +++ b/frontend/src/container/FormAlertRules/utils.test.ts @@ -5,10 +5,10 @@ import { getUpdatedStepInterval } from './utils'; describe('getUpdatedStepInterval', () => { it('should return 60', () => { const result = getUpdatedStepInterval('5m0s'); - expect(result).toEqual(60); + expect(result).toBe(60); }); it('should return 60 for 10m0s', () => { const result = getUpdatedStepInterval('10m0s'); - expect(result).toEqual(60); + expect(result).toBe(60); }); }); diff --git a/frontend/src/container/GridTableComponent/__tests__/utils.test.tsx b/frontend/src/container/GridTableComponent/__tests__/utils.test.tsx index ed3ba194d1..7f00572ec5 100644 --- a/frontend/src/container/GridTableComponent/__tests__/utils.test.tsx +++ b/frontend/src/container/GridTableComponent/__tests__/utils.test.tsx @@ -44,7 +44,7 @@ describe('Table Panel utils', () => { expect(getQueryLegend(query, 'A')).toBe('p99'); // should return undefined when legend not present - expect(getQueryLegend(query, 
'B')).toBe(undefined); + expect(getQueryLegend(query, 'B')).toBeUndefined(); }); it('sorter function for table sorting', () => { diff --git a/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityLogs/__tests__/EntityLogs.test.tsx b/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityLogs/__tests__/EntityLogs.test.tsx index 9de0738862..15235dd771 100644 --- a/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityLogs/__tests__/EntityLogs.test.tsx +++ b/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityLogs/__tests__/EntityLogs.test.tsx @@ -108,7 +108,7 @@ describe('EntityLogs', () => { }); await waitFor(() => { - expect(capturedQueryRangePayloads.length).toBe(1); + expect(capturedQueryRangePayloads).toHaveLength(1); }); await waitFor(async () => { @@ -131,7 +131,7 @@ describe('EntityLogs', () => { }); await waitFor(() => { - expect(capturedQueryRangePayloads.length).toBe(2); + expect(capturedQueryRangePayloads).toHaveLength(2); }); const firstPayload = capturedQueryRangePayloads[0]; diff --git a/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/__tests__/ServiceDetailsS3Sync.test.tsx b/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/__tests__/ServiceDetailsS3Sync.test.tsx index c75b92c532..9b34c134b4 100644 --- a/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/__tests__/ServiceDetailsS3Sync.test.tsx +++ b/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/__tests__/ServiceDetailsS3Sync.test.tsx @@ -149,7 +149,7 @@ describe('ServiceDetails for S3 Sync service', () => { expect(capturedPayload).not.toBeNull(); }); - expect(capturedPayload).toEqual({ + expect(capturedPayload).toStrictEqual({ config: { aws: { logs: { diff --git a/frontend/src/container/ListAlertRules/__test__/utils.test.ts b/frontend/src/container/ListAlertRules/__test__/utils.test.ts index 65b94dd5b1..07a1e67bb0 100644 --- 
a/frontend/src/container/ListAlertRules/__test__/utils.test.ts +++ b/frontend/src/container/ListAlertRules/__test__/utils.test.ts @@ -68,12 +68,12 @@ describe('filterAlerts', () => { it('should return all alerts when filter is empty', () => { const result = filterAlerts(mockAlerts, ''); - expect(result).toEqual(mockAlerts); + expect(result).toStrictEqual(mockAlerts); }); it('should return all alerts when filter is only whitespace', () => { const result = filterAlerts(mockAlerts, ' '); - expect(result).toEqual(mockAlerts); + expect(result).toStrictEqual(mockAlerts); }); it('should filter alerts by alert name', () => { diff --git a/frontend/src/container/LogDetailedView/ContextView/__tests__/ContextLogRenderer.test.tsx b/frontend/src/container/LogDetailedView/ContextView/__tests__/ContextLogRenderer.test.tsx index 0d63c3d80c..8507eec491 100644 --- a/frontend/src/container/LogDetailedView/ContextView/__tests__/ContextLogRenderer.test.tsx +++ b/frontend/src/container/LogDetailedView/ContextView/__tests__/ContextLogRenderer.test.tsx @@ -251,20 +251,20 @@ describe('ContextLogRenderer', () => { const afterQuery = (afterPayload.compositeQuery as any).queries[0].spec; // Verify timestamps remain constant - expect(afterStart).toEqual(initialPayload.start); - expect(afterEnd).toEqual(initialPayload.end); + expect(afterStart).toStrictEqual(initialPayload.start); + expect(afterEnd).toStrictEqual(initialPayload.end); // Verify offset changes - expect(initialQuery.offset).toEqual(0); - expect(afterQuery.offset).toEqual(10); + expect(initialQuery.offset).toBe(0); + expect(afterQuery.offset).toBe(10); // Verify filter changes expect(initialQuery.filter.expression).toContain(expectedOpChange.before); expect(afterQuery.filter.expression).toContain(expectedOpChange.after); // Verify query structure remains consistent - expect(initialQuery.name).toEqual(afterQuery.name); - expect(initialQuery.signal).toEqual(afterQuery.signal); + 
expect(initialQuery.name).toStrictEqual(afterQuery.name); + expect(initialQuery.signal).toStrictEqual(afterQuery.signal); }; it('should keep the start and end timestamps constant on clicking load more (prev / next) pages', async () => { diff --git a/frontend/src/container/LogDetailedView/util.test.ts b/frontend/src/container/LogDetailedView/util.test.ts index f2131c253b..3a16e253a4 100644 --- a/frontend/src/container/LogDetailedView/util.test.ts +++ b/frontend/src/container/LogDetailedView/util.test.ts @@ -10,20 +10,20 @@ import { describe('recursiveParseJSON', () => { it('should return an empty object if the input is not valid JSON', () => { const result = recursiveParseJSON('not valid JSON'); - expect(result).toEqual({}); + expect(result).toStrictEqual({}); }); it('should return the parsed JSON object for valid JSON input', () => { const jsonString = '{"name": "John", "age": 30}'; const result = recursiveParseJSON(jsonString); - expect(result).toEqual({ name: 'John', age: 30 }); + expect(result).toStrictEqual({ name: 'John', age: 30 }); }); it('should recursively parse nested JSON objects', () => { const jsonString = '{"name": "John", "age": 30, "address": {"street": "123 Main St", "city": "Anytown", "state": "CA"}}'; const result = recursiveParseJSON(jsonString); - expect(result).toEqual({ + expect(result).toStrictEqual({ name: 'John', age: 30, address: { @@ -37,19 +37,21 @@ describe('recursiveParseJSON', () => { it('should recursively parse nested JSON arrays', () => { const jsonString = '[1, 2, [3, 4], {"foo": "bar"}]'; const result = recursiveParseJSON(jsonString); - expect(result).toEqual([1, 2, [3, 4], { foo: 'bar' }]); + expect(result).toStrictEqual([1, 2, [3, 4], { foo: 'bar' }]); }); it('should recursively parse deeply nested JSON objects', () => { const jsonString = '{"foo": {"bar": {"baz": {"qux": {"value": 42}}}}}'; const result = recursiveParseJSON(jsonString); - expect(result).toEqual({ foo: { bar: { baz: { qux: { value: 42 } } } } }); + 
expect(result).toStrictEqual({ + foo: { bar: { baz: { qux: { value: 42 } } } }, + }); }); it('should handle JSON input that contains escaped characters', () => { const jsonString = '{"name": "John\\", \\"Doe", "age": 30}'; const result = recursiveParseJSON(jsonString); - expect(result).toEqual({ name: 'John", "Doe', age: 30 }); + expect(result).toStrictEqual({ name: 'John", "Doe', age: 30 }); }); }); @@ -72,14 +74,14 @@ describe('flattenObject in the objects recursively', () => { f: 4, }; - expect(flattenObject(nestedObj)).toEqual(expected); + expect(flattenObject(nestedObj)).toStrictEqual(expected); }); it('should return an empty object when input is empty', () => { const nestedObj = {}; const expected = {}; - expect(flattenObject(nestedObj)).toEqual(expected); + expect(flattenObject(nestedObj)).toStrictEqual(expected); }); it('should handle non-nested objects correctly', () => { @@ -94,7 +96,7 @@ describe('flattenObject in the objects recursively', () => { c: 3, }; - expect(flattenObject(nestedObj)).toEqual(expected); + expect(flattenObject(nestedObj)).toStrictEqual(expected); }); it('should handle null and undefined correctly', () => { @@ -107,7 +109,7 @@ describe('flattenObject in the objects recursively', () => { b: undefined, }; - expect(flattenObject(nestedObj)).toEqual(expected); + expect(flattenObject(nestedObj)).toStrictEqual(expected); }); it('should handle arrays correctly', () => { @@ -120,7 +122,7 @@ describe('flattenObject in the objects recursively', () => { b: 2, }; - expect(flattenObject(objWithArray)).toEqual(expected); + expect(flattenObject(objWithArray)).toStrictEqual(expected); }); it('should handle nested objects in arrays correctly', () => { @@ -133,7 +135,7 @@ describe('flattenObject in the objects recursively', () => { d: 3, }; - expect(flattenObject(objWithArray)).toEqual(expected); + expect(flattenObject(objWithArray)).toStrictEqual(expected); }); it('should handle objects with arrays and nested objects correctly', () => { @@ -150,7 
+152,7 @@ describe('flattenObject in the objects recursively', () => { e: 5, }; - expect(flattenObject(complexObj)).toEqual(expected); + expect(flattenObject(complexObj)).toStrictEqual(expected); }); }); diff --git a/frontend/src/container/LogDetailedView/utils.tsx b/frontend/src/container/LogDetailedView/utils.tsx index c021257667..65d22f7bac 100644 --- a/frontend/src/container/LogDetailedView/utils.tsx +++ b/frontend/src/container/LogDetailedView/utils.tsx @@ -287,7 +287,7 @@ export const aggregateAttributesResourcesToString = (logData: ILog): string => { outputJson.scope = outputJson.scope || {}; Object.assign(outputJson.scope, logData[key as keyof ILog]); } else { - // @ts-ignore + // @ts-expect-error outputJson[key] = logData[key as keyof ILog]; } }); diff --git a/frontend/src/container/LogExplorerQuerySection/LogExplorerQuerySection.test.tsx b/frontend/src/container/LogExplorerQuerySection/LogExplorerQuerySection.test.tsx index abbe018434..181408bad1 100644 --- a/frontend/src/container/LogExplorerQuerySection/LogExplorerQuerySection.test.tsx +++ b/frontend/src/container/LogExplorerQuerySection/LogExplorerQuerySection.test.tsx @@ -256,7 +256,7 @@ describe('LogExplorerQuerySection', () => { VIEWS_TO_TEST.forEach((view) => { rerender(); - expect(mockQueryBuilderContext.currentQuery).toEqual(initialQuery); + expect(mockQueryBuilderContext.currentQuery).toStrictEqual(initialQuery); }); }); diff --git a/frontend/src/container/LogsExplorerContext/__tests__/useInitialQuery.test.ts b/frontend/src/container/LogsExplorerContext/__tests__/useInitialQuery.test.ts index 15ad7eb1e4..50d05d6374 100644 --- a/frontend/src/container/LogsExplorerContext/__tests__/useInitialQuery.test.ts +++ b/frontend/src/container/LogsExplorerContext/__tests__/useInitialQuery.test.ts @@ -350,7 +350,7 @@ describe('useInitialQuery - Priority-Based Resource Filtering', () => { const serviceItems = calledWith.items.filter( (item: TagFilterItem) => item.key?.key === 'service.name', ); - 
expect(serviceItems.length).toBe(1); + expect(serviceItems).toHaveLength(1); // Verify no priority items (k8s, cloud, host, container) are included const priorityItems = calledWith.items.filter( diff --git a/frontend/src/container/LogsExplorerViews/tests/LogsExplorerPagination.test.tsx b/frontend/src/container/LogsExplorerViews/tests/LogsExplorerPagination.test.tsx index 7b43027216..7f7974fc0a 100644 --- a/frontend/src/container/LogsExplorerViews/tests/LogsExplorerPagination.test.tsx +++ b/frontend/src/container/LogsExplorerViews/tests/LogsExplorerPagination.test.tsx @@ -255,7 +255,7 @@ describe.skip('LogsExplorerViews Pagination', () => { expect( screen.queryByText('pending_data_placeholder'), ).not.toBeInTheDocument(); - expect(capturedPayloads.length).toBe(1); + expect(capturedPayloads).toHaveLength(1); }); // Verify the payload of the first call, expecting offset 0 @@ -285,7 +285,7 @@ describe.skip('LogsExplorerViews Pagination', () => { // Verify the second page request was made // Wait for the second API call to be captured after the scroll await waitFor(() => { - expect(capturedPayloads.length).toBe(2); + expect(capturedPayloads).toHaveLength(2); }); // Store the time range from the first payload, which should be consistent in subsequent requests @@ -324,7 +324,7 @@ describe.skip('LogsExplorerViews Pagination', () => { // Verify the third page request was made // Wait for the third API call to be captured await waitFor(() => { - expect(capturedPayloads.length).toBe(3); + expect(capturedPayloads).toHaveLength(3); }); const thirdPayload = capturedPayloads[2]; // Verify the payload of the third call, expecting offset 200 and consistent time range @@ -526,13 +526,13 @@ describe('Logs Explorer -> stage and run query', () => { const secondPayload = capturedPayloads[capturedPayloads.length - 1]; // Verify that the timestamps have changed due to UpdateTimeInterval - expect(secondPayload.start).not.toEqual(initialStart); - 
expect(secondPayload.end).not.toEqual(initialEnd); + expect(secondPayload.start).not.toStrictEqual(initialStart); + expect(secondPayload.end).not.toStrictEqual(initialEnd); // The timestamps should be different (the exact difference depends on the mock implementation) // Note: The timestamps might go backwards if UpdateTimeInterval is not called properly - expect(secondPayload.start).not.toEqual(initialStart); - expect(secondPayload.end).not.toEqual(initialEnd); + expect(secondPayload.start).not.toStrictEqual(initialStart); + expect(secondPayload.end).not.toStrictEqual(initialEnd); // Verify that the IDs have changed (this confirms the Stage & Run Query button worked) expect(currentStagedQuery.id).not.toBe(initialStagedQueryId); diff --git a/frontend/src/container/LogsExplorerViews/tests/LogsExplorerViews.test.tsx b/frontend/src/container/LogsExplorerViews/tests/LogsExplorerViews.test.tsx index 637c0b9a3f..90ad3dfb63 100644 --- a/frontend/src/container/LogsExplorerViews/tests/LogsExplorerViews.test.tsx +++ b/frontend/src/container/LogsExplorerViews/tests/LogsExplorerViews.test.tsx @@ -183,7 +183,7 @@ describe('LogsExplorerViews -', () => { // Test that the menu items are present const expectedMenuItemsCount = 3; const menuItems = document.querySelectorAll('.menu-items .item'); - expect(menuItems.length).toBe(expectedMenuItemsCount); + expect(menuItems).toHaveLength(expectedMenuItemsCount); // Test that the component renders without crashing expect(queryByTestId(periscopeDownloadButtonTestId)).toBeInTheDocument(); @@ -429,7 +429,7 @@ describe('LogsExplorerViews -', () => { expect(first.groupBy?.length ?? 
0).toBe(0); expect(first.having?.expression).toBe(''); // Default orderBy should be timestamp desc, then id desc - expect(first.orderBy).toEqual([ + expect(first.orderBy).toStrictEqual([ { columnName: 'timestamp', order: 'desc' }, { columnName: 'id', order: 'desc' }, ]); diff --git a/frontend/src/container/MembersSettings/__tests__/MembersSettings.integration.test.tsx b/frontend/src/container/MembersSettings/__tests__/MembersSettings.integration.test.tsx index e0afcf169b..a3eb913d21 100644 --- a/frontend/src/container/MembersSettings/__tests__/MembersSettings.integration.test.tsx +++ b/frontend/src/container/MembersSettings/__tests__/MembersSettings.integration.test.tsx @@ -135,8 +135,8 @@ describe('MembersSettings (integration)', () => { await user.click(screen.getByRole('button', { name: /invite member/i })); - expect(await screen.findAllByPlaceholderText('john@signoz.io')).toHaveLength( - 3, - ); + await expect( + screen.findAllByPlaceholderText('john@signoz.io'), + ).resolves.toHaveLength(3); }); }); diff --git a/frontend/src/container/MetricsApplication/__tests__/TopOperationsTable.test.tsx b/frontend/src/container/MetricsApplication/__tests__/TopOperationsTable.test.tsx index 32343ed9f6..74a0d90755 100644 --- a/frontend/src/container/MetricsApplication/__tests__/TopOperationsTable.test.tsx +++ b/frontend/src/container/MetricsApplication/__tests__/TopOperationsTable.test.tsx @@ -164,7 +164,7 @@ describe('TopOperation API Integration', () => { // Verify that only the top_operations endpoint was called expect(apiCalls).toHaveLength(1); expect(apiCalls[0].endpoint).toBe(TOP_OPERATIONS_ENDPOINT); - expect(apiCalls[0].body).toEqual({ + expect(apiCalls[0].body).toStrictEqual({ start: `${defaultApiCallExpectation.start}`, end: `${defaultApiCallExpectation.end}`, service: defaultApiCallExpectation.service, @@ -198,7 +198,7 @@ describe('TopOperation API Integration', () => { // Verify that the entry_point_operations endpoint was called expect(apiCalls).toHaveLength(1); 
expect(apiCalls[0].endpoint).toBe(ENTRY_POINT_OPERATIONS_ENDPOINT); - expect(apiCalls[0].body).toEqual({ + expect(apiCalls[0].body).toStrictEqual({ start: `${defaultApiCallExpectation.start}`, end: `${defaultApiCallExpectation.end}`, service: defaultApiCallExpectation.service, diff --git a/frontend/src/container/MetricsApplication/utils.test.ts b/frontend/src/container/MetricsApplication/utils.test.ts index 7ad338d850..b25b397336 100644 --- a/frontend/src/container/MetricsApplication/utils.test.ts +++ b/frontend/src/container/MetricsApplication/utils.test.ts @@ -7,7 +7,7 @@ import { } from './utils'; describe('Error Rate', () => { - test('should return correct error rate', () => { + it('should return correct error rate', () => { const list: TopOperationList = getTopOperationList({ errorCount: 10, numCalls: 100, @@ -16,35 +16,35 @@ describe('Error Rate', () => { expect(getErrorRate(list)).toBe(10); }); - test('should handle no errors gracefully', () => { + it('should handle no errors gracefully', () => { const list = getTopOperationList({ errorCount: 0, numCalls: 100 }); expect(getErrorRate(list)).toBe(0); }); - test('should handle zero calls', () => { + it('should handle zero calls', () => { const list = getTopOperationList({ errorCount: 0, numCalls: 0 }); expect(getErrorRate(list)).toBe(0); }); }); describe('getNearestHighestBucketValue', () => { - test('should return nearest higher bucket value', () => { + it('should return nearest higher bucket value', () => { expect(getNearestHighestBucketValue(50, [10, 20, 30, 40, 60, 70])).toBe('60'); }); - test('should return +Inf for value higher than any bucket', () => { + it('should return +Inf for value higher than any bucket', () => { expect(getNearestHighestBucketValue(80, [10, 20, 30, 40, 60, 70])).toBe( '+Inf', ); }); - test('should return the first bucket for value lower than all buckets', () => { + it('should return the first bucket for value lower than all buckets', () => { expect(getNearestHighestBucketValue(5, 
[10, 20, 30, 40, 60, 70])).toBe('10'); }); }); describe('convertedTracesToDownloadData', () => { - test('should convert trace data correctly', () => { + it('should convert trace data correctly', () => { const data = [ { name: 'op1', @@ -56,7 +56,7 @@ describe('convertedTracesToDownloadData', () => { }, ]; - expect(convertedTracesToDownloadData(data)).toEqual([ + expect(convertedTracesToDownloadData(data)).toStrictEqual([ { Name: 'op1', 'P50 (in ms)': '50.00', diff --git a/frontend/src/container/MetricsExplorer/Explorer/__tests__/TimeSeries.test.tsx b/frontend/src/container/MetricsExplorer/Explorer/__tests__/TimeSeries.test.tsx index 294a189da7..5a1f59768f 100644 --- a/frontend/src/container/MetricsExplorer/Explorer/__tests__/TimeSeries.test.tsx +++ b/frontend/src/container/MetricsExplorer/Explorer/__tests__/TimeSeries.test.tsx @@ -130,7 +130,9 @@ describe('TimeSeries', () => { const alertIcon = screen.getByRole('img', { name: 'no unit warning' }); await user.hover(alertIcon); - expect(await screen.findByText('metric details')).toBeInTheDocument(); + await expect( + screen.findByText('metric details'), + ).resolves.toBeInTheDocument(); }); it('shows save unit prompt with enabled button when metric has no unit and a unit is selected', async () => { @@ -142,9 +144,9 @@ describe('TimeSeries', () => { showYAxisUnitSelector: true, }); - expect( - await screen.findByText('Set the selected unit as the metric unit?'), - ).toBeInTheDocument(); + await expect( + screen.findByText('Set the selected unit as the metric unit?'), + ).resolves.toBeInTheDocument(); const yesButton = screen.getByRole('button', { name: 'Yes' }); expect(yesButton).toBeEnabled(); diff --git a/frontend/src/container/MetricsExplorer/Explorer/__tests__/utils.test.tsx b/frontend/src/container/MetricsExplorer/Explorer/__tests__/utils.test.tsx index 31789d84af..9a46f41322 100644 --- a/frontend/src/container/MetricsExplorer/Explorer/__tests__/utils.test.tsx +++ 
b/frontend/src/container/MetricsExplorer/Explorer/__tests__/utils.test.tsx @@ -61,24 +61,24 @@ describe('splitQueryIntoOneChartPerQuery', () => { expect(result).toHaveLength(4); // Verify query 1 has the correct data expect(result[0].builder.queryData).toHaveLength(1); - expect(result[0].builder.queryData[0]).toEqual(MOCK_QUERY_DATA_1); + expect(result[0].builder.queryData[0]).toStrictEqual(MOCK_QUERY_DATA_1); expect(result[0].builder.queryFormulas).toHaveLength(0); expect(result[0].unit).toBeUndefined(); // Verify query 2 has the correct data expect(result[1].builder.queryData).toHaveLength(1); - expect(result[1].builder.queryData[0]).toEqual(MOCK_QUERY_DATA_2); + expect(result[1].builder.queryData[0]).toStrictEqual(MOCK_QUERY_DATA_2); expect(result[1].builder.queryFormulas).toHaveLength(0); expect(result[1].unit).toBe('unit2'); // Verify query 3 has the correct data expect(result[2].builder.queryFormulas).toHaveLength(1); - expect(result[2].builder.queryFormulas[0]).toEqual(MOCK_FORMULA_DATA); + expect(result[2].builder.queryFormulas[0]).toStrictEqual(MOCK_FORMULA_DATA); expect(result[2].builder.queryData).toHaveLength(2); // 2 disabled queries expect(result[2].builder.queryData[0].disabled).toBe(true); expect(result[2].builder.queryData[1].disabled).toBe(true); expect(result[2].unit).toBe(''); // Verify query 4 has the correct data expect(result[3].builder.queryFormulas).toHaveLength(1); - expect(result[3].builder.queryFormulas[0]).toEqual(MOCK_FORMULA_DATA); + expect(result[3].builder.queryFormulas[0]).toStrictEqual(MOCK_FORMULA_DATA); expect(result[3].builder.queryData).toHaveLength(2); // 2 disabled queries expect(result[3].builder.queryData[0].disabled).toBe(true); expect(result[3].builder.queryData[1].disabled).toBe(true); @@ -106,7 +106,7 @@ describe('useGetMetrics', () => { const { result } = renderHook(() => useGetMetrics(['metric1'])); expect(result.current.metrics).toHaveLength(1); expect(result.current.metrics[0]).toBeDefined(); - 
expect(result.current.metrics[0]).toEqual(MOCK_METRIC_METADATA); + expect(result.current.metrics[0]).toStrictEqual(MOCK_METRIC_METADATA); expect(result.current.isLoading).toBe(false); expect(result.current.isError).toBe(false); }); @@ -131,7 +131,7 @@ describe('getMetricUnits', () => { it('should return the same unit for units that are not known to the universal unit mapper', () => { const result = getMetricUnits([MOCK_METRIC_METADATA]); expect(result).toHaveLength(1); - expect(result[0]).toEqual(MOCK_METRIC_METADATA.unit); + expect(result[0]).toStrictEqual(MOCK_METRIC_METADATA.unit); }); it('should return universal unit for units that are known to the universal unit mapper', () => { diff --git a/frontend/src/container/NewWidget/RightContainer/ContextLinks/__test__/ContextLinks.test.tsx b/frontend/src/container/NewWidget/RightContainer/ContextLinks/__test__/ContextLinks.test.tsx index 6486b78373..981ad8ea17 100644 --- a/frontend/src/container/NewWidget/RightContainer/ContextLinks/__test__/ContextLinks.test.tsx +++ b/frontend/src/container/NewWidget/RightContainer/ContextLinks/__test__/ContextLinks.test.tsx @@ -157,7 +157,7 @@ describe('ContextLinks Component', () => { // Test the function by calling it with the current state const result = setContextLinksCall(MOCK_EMPTY_CONTEXT_LINKS); - expect(result).toEqual({ + expect(result).toStrictEqual({ linksData: [ { id: expect.any(String), // ID is generated dynamically diff --git a/frontend/src/container/NewWidget/__test__/NewWidget.test.tsx b/frontend/src/container/NewWidget/__test__/NewWidget.test.tsx index 25a908ddd9..d676078ac3 100644 --- a/frontend/src/container/NewWidget/__test__/NewWidget.test.tsx +++ b/frontend/src/container/NewWidget/__test__/NewWidget.test.tsx @@ -93,7 +93,7 @@ jest.mock('react-router-dom-v5-compat', () => ({ describe('placeWidgetAtBottom', () => { it('should place widget at (0,0) when layout is empty', () => { const result = placeWidgetAtBottom('widget1', []); - expect(result).toEqual({ + 
expect(result).toStrictEqual({ i: 'widget1', x: 0, y: 0, @@ -104,7 +104,7 @@ describe('placeWidgetAtBottom', () => { it('should place widget at (0,0) with custom dimensions when layout is empty', () => { const result = placeWidgetAtBottom('widget1', [], 4, 8); - expect(result).toEqual({ + expect(result).toStrictEqual({ i: 'widget1', x: 0, y: 0, @@ -116,7 +116,7 @@ describe('placeWidgetAtBottom', () => { it('should place widget next to existing widget in last row if space available', () => { const existingLayout = [{ i: 'widget1', x: 0, y: 0, w: 6, h: 6 }]; const result = placeWidgetAtBottom('widget2', existingLayout); - expect(result).toEqual({ + expect(result).toStrictEqual({ i: 'widget2', x: 6, y: 0, @@ -131,7 +131,7 @@ describe('placeWidgetAtBottom', () => { { i: 'widget2', x: 6, y: 0, w: 6, h: 6 }, ]; const result = placeWidgetAtBottom('widget3', existingLayout); - expect(result).toEqual({ + expect(result).toStrictEqual({ i: 'widget3', x: 0, y: 6, @@ -147,7 +147,7 @@ describe('placeWidgetAtBottom', () => { { i: 'widget3', x: 0, y: 6, w: 6, h: 6 }, ]; const result = placeWidgetAtBottom('widget4', existingLayout); - expect(result).toEqual({ + expect(result).toStrictEqual({ i: 'widget4', x: 6, y: 6, @@ -163,7 +163,7 @@ describe('placeWidgetAtBottom', () => { ]; const result = placeWidgetAtBottom('widget3', existingLayout); // y = 2 here as later the react-grid-layout will add 2px to the y value while adjusting the layout - expect(result).toEqual({ + expect(result).toStrictEqual({ i: 'widget3', x: 6, y: 2, @@ -176,7 +176,7 @@ describe('placeWidgetAtBottom', () => { describe('placeWidgetBetweenRows', () => { it('should return single widget layout when layout is empty', () => { const result = placeWidgetBetweenRows('widget1', [], 'currentRow'); - expect(result).toEqual([ + expect(result).toStrictEqual([ { i: 'widget1', x: 0, @@ -195,7 +195,7 @@ describe('placeWidgetBetweenRows', () => { const result = placeWidgetBetweenRows('widget3', existingLayout, 'widget2'); - 
expect(result).toEqual([ + expect(result).toStrictEqual([ { i: 'widget1', x: 0, y: 0, w: 6, h: 6 }, { i: 'widget2', x: 6, y: 0, w: 6, h: 6 }, { i: 'widget3', x: 0, y: 6, w: 6, h: 6 }, @@ -238,7 +238,7 @@ describe('placeWidgetBetweenRows', () => { 'widget3', ); - expect(result).toEqual([ + expect(result).toStrictEqual([ { h: 1, i: "'widget1'", @@ -292,7 +292,7 @@ describe('placeWidgetBetweenRows', () => { 3, ); - expect(result).toEqual([ + expect(result).toStrictEqual([ { i: 'widget1', x: 0, y: 0, w: 12, h: 4 }, { i: 'widget2', x: 0, y: 4, w: 8, h: 3 }, ]); diff --git a/frontend/src/container/NewWidget/__test__/getUplotChartData.test.ts b/frontend/src/container/NewWidget/__test__/getUplotChartData.test.ts index 26b4def446..5be043bcc3 100644 --- a/frontend/src/container/NewWidget/__test__/getUplotChartData.test.ts +++ b/frontend/src/container/NewWidget/__test__/getUplotChartData.test.ts @@ -12,7 +12,7 @@ describe('getUplotChartData', () => { BarNonStackedChartData.fillSpans, BarNonStackedChartData.stackedBarChart, ); - expect(result).toEqual([ + expect(result).toStrictEqual([ [1758713940, 1758715020], [33.933, 31.767], [20.0, 25.0], @@ -29,10 +29,10 @@ describe('getUplotChartData', () => { // First series: [33.933, 31.767] + [20.0, 25.0] = [53.933, 56.767] // Second series: [20.0, 25.0] (unchanged) expect(result).toHaveLength(3); - expect(result[0]).toEqual([1758713940, 1758715020]); + expect(result[0]).toStrictEqual([1758713940, 1758715020]); expect(result[1][0]).toBeCloseTo(53.933, 3); expect(result[1][1]).toBeCloseTo(56.767, 3); - expect(result[2]).toEqual([20.0, 25.0]); + expect(result[2]).toStrictEqual([20.0, 25.0]); }); it('should return the correct chart data for time series chart', () => { @@ -41,7 +41,7 @@ describe('getUplotChartData', () => { TimeSeriesChartData.fillSpans, TimeSeriesChartData.stackedBarChart, ); - expect(result).toEqual([ + expect(result).toStrictEqual([ [1758713940, 1758715020], [33.933, 31.767], [20.0, 25.0], diff --git 
a/frontend/src/container/NewWidget/__test__/utils.test.ts b/frontend/src/container/NewWidget/__test__/utils.test.ts index a6ef14d8b3..683d472572 100644 --- a/frontend/src/container/NewWidget/__test__/utils.test.ts +++ b/frontend/src/container/NewWidget/__test__/utils.test.ts @@ -38,7 +38,7 @@ const buildSupersetQuery = (extras?: Record): Query => ({ }); describe('handleQueryChange', () => { - test('sets list-specific fields when switching to LIST', () => { + it('sets list-specific fields when switching to LIST', () => { const superset = buildSupersetQuery(); const output = handleQueryChange( PANEL_TYPES.LIST as keyof PartialPanelTypes, @@ -60,7 +60,7 @@ describe('handleQueryChange', () => { expect(secondQuery.queryName).toBe('B'); }); - test('resets noop and pagination when leaving LIST', () => { + it('resets noop and pagination when leaving LIST', () => { const superset = buildSupersetQuery({ aggregateOperator: 'noop', offset: 5, diff --git a/frontend/src/container/OnboardingContainer/Steps/MarkdownStep/MarkdownStep.tsx b/frontend/src/container/OnboardingContainer/Steps/MarkdownStep/MarkdownStep.tsx index 7daf6923c3..04cc4d704d 100644 --- a/frontend/src/container/OnboardingContainer/Steps/MarkdownStep/MarkdownStep.tsx +++ b/frontend/src/container/OnboardingContainer/Steps/MarkdownStep/MarkdownStep.tsx @@ -86,9 +86,9 @@ export default function MarkdownStep(): JSX.Element { } else if (selectedModule?.id === ModulesMap.AzureMonitoring) { docFilePaths = AzureMonitoringDocFilePaths; } - // @ts-ignore + // @ts-expect-error if (docFilePaths && docFilePaths[path]) { - // @ts-ignore + // @ts-expect-error setMarkdownContent(docFilePaths[path]); } diff --git a/frontend/src/container/OnboardingContainer/utils/dataSourceUtils.ts b/frontend/src/container/OnboardingContainer/utils/dataSourceUtils.ts index 0bca68bf26..5b18090418 100644 --- a/frontend/src/container/OnboardingContainer/utils/dataSourceUtils.ts +++ 
b/frontend/src/container/OnboardingContainer/utils/dataSourceUtils.ts @@ -391,7 +391,7 @@ export const getSupportedFrameworks = ({ return []; } - // @ts-ignore + // @ts-expect-error return frameworksMap[moduleID][dataSourceName]; }; diff --git a/frontend/src/container/OnboardingQuestionaire/__tests__/OnboardingQuestionaire.test.tsx b/frontend/src/container/OnboardingQuestionaire/__tests__/OnboardingQuestionaire.test.tsx index 961bd4bbb4..a58648f94b 100644 --- a/frontend/src/container/OnboardingQuestionaire/__tests__/OnboardingQuestionaire.test.tsx +++ b/frontend/src/container/OnboardingQuestionaire/__tests__/OnboardingQuestionaire.test.tsx @@ -103,9 +103,9 @@ describe('OnboardingQuestionaire Component', () => { const othersCheckbox = screen.getByLabelText(/^others$/i); await user.click(othersCheckbox); - expect( - await screen.findByPlaceholderText(/what tool do you currently use/i), - ).toBeInTheDocument(); + await expect( + screen.findByPlaceholderText(/what tool do you currently use/i), + ).resolves.toBeInTheDocument(); }); it('shows migration timeline options only when specific observability tools are selected', async () => { @@ -120,9 +120,9 @@ describe('OnboardingQuestionaire Component', () => { const datadogCheckbox = screen.getByLabelText(/datadog/i); await user.click(datadogCheckbox); - expect( - await screen.findByText(/What is your timeline for migrating to SigNoz/i), - ).toBeInTheDocument(); + await expect( + screen.findByText(/What is your timeline for migrating to SigNoz/i), + ).resolves.toBeInTheDocument(); // Not visible when None is selected const noneCheckbox = screen.getByLabelText(/none\/starting fresh/i); @@ -144,9 +144,9 @@ describe('OnboardingQuestionaire Component', () => { const nextButton = screen.getByRole('button', { name: /next/i }); await user.click(nextButton); - expect( - await screen.findByText(/how did you first come across signoz/i, {}), - ).toBeInTheDocument(); + await expect( + screen.findByText(/how did you first come across 
signoz/i, {}), + ).resolves.toBeInTheDocument(); }); }); @@ -162,12 +162,12 @@ describe('OnboardingQuestionaire Component', () => { await user.click(screen.getByLabelText(/just exploring/i)); await user.click(screen.getByRole('button', { name: /next/i })); - expect( - await screen.findByText(/set up your workspace/i, {}), - ).toBeInTheDocument(); - expect( - await screen.findByText(/how did you first come across signoz/i, {}), - ).toBeInTheDocument(); + await expect( + screen.findByText(/set up your workspace/i, {}), + ).resolves.toBeInTheDocument(); + await expect( + screen.findByText(/how did you first come across signoz/i, {}), + ).resolves.toBeInTheDocument(); }); it('disables next button when fields are empty', async () => { @@ -198,9 +198,9 @@ describe('OnboardingQuestionaire Component', () => { await user.click(screen.getByLabelText(/just exploring/i)); await user.click(screen.getByRole('button', { name: /next/i })); - expect( - await screen.findByPlaceholderText(/e\.g\., googling/i, {}), - ).toBeInTheDocument(); + await expect( + screen.findByPlaceholderText(/e\.g\., googling/i, {}), + ).resolves.toBeInTheDocument(); const discoverInput = screen.getByPlaceholderText(/e\.g\., googling/i); await user.type(discoverInput, 'Found via Google search'); @@ -225,19 +225,16 @@ describe('OnboardingQuestionaire Component', () => { await user.click(screen.getByLabelText(/just exploring/i)); await user.click(screen.getByRole('button', { name: /next/i })); - expect( - await screen.findByText(/what got you interested in signoz/i, {}), - ).toBeInTheDocument(); + await expect( + screen.findByText(/what got you interested in signoz/i, {}), + ).resolves.toBeInTheDocument(); const othersCheckbox = screen.getByLabelText(/^others$/i); await user.click(othersCheckbox); - expect( - await screen.findByPlaceholderText( - /what got you interested in signoz/i, - {}, - ), - ).toBeInTheDocument(); + await expect( + screen.findByPlaceholderText(/what got you interested in signoz/i, {}), + 
).resolves.toBeInTheDocument(); }); }); @@ -253,9 +250,9 @@ describe('OnboardingQuestionaire Component', () => { await user.click(screen.getByLabelText(/just exploring/i)); await user.click(screen.getByRole('button', { name: /next/i })); - expect( - await screen.findByPlaceholderText(/e\.g\., googling/i, {}), - ).toBeInTheDocument(); + await expect( + screen.findByPlaceholderText(/e\.g\., googling/i, {}), + ).resolves.toBeInTheDocument(); await user.type( screen.getByPlaceholderText(/e\.g\., googling/i), @@ -264,16 +261,15 @@ describe('OnboardingQuestionaire Component', () => { await user.click(screen.getByLabelText(/lowering observability costs/i)); await user.click(screen.getByRole('button', { name: /next/i })); - expect( - await screen.findByText( - /what does your scale approximately look like/i, - {}, - ), - ).toBeInTheDocument(); - expect(await screen.findByText(/logs \/ day/i, {})).toBeInTheDocument(); - expect( - await screen.findByText(/number of services/i, {}), - ).toBeInTheDocument(); + await expect( + screen.findByText(/what does your scale approximately look like/i, {}), + ).resolves.toBeInTheDocument(); + await expect( + screen.findByText(/logs \/ day/i, {}), + ).resolves.toBeInTheDocument(); + await expect( + screen.findByText(/number of services/i, {}), + ).resolves.toBeInTheDocument(); }); it('fires PUT to /zeus/profiles and advances to step 4 on success', async () => { @@ -327,9 +323,9 @@ describe('OnboardingQuestionaire Component', () => { await user.click(screen.getByLabelText(/just exploring/i)); await user.click(screen.getByRole('button', { name: /next/i })); - expect( - await screen.findByPlaceholderText(/e\.g\., googling/i, {}), - ).toBeInTheDocument(); + await expect( + screen.findByPlaceholderText(/e\.g\., googling/i, {}), + ).resolves.toBeInTheDocument(); await user.type( screen.getByPlaceholderText(/e\.g\., googling/i), @@ -338,9 +334,9 @@ describe('OnboardingQuestionaire Component', () => { await 
user.click(screen.getByLabelText(/lowering observability costs/i)); await user.click(screen.getByRole('button', { name: /next/i })); - expect( - await screen.findByRole('button', { name: /i'll do this later/i }), - ).toBeInTheDocument(); + await expect( + screen.findByRole('button', { name: /i'll do this later/i }), + ).resolves.toBeInTheDocument(); }); }); }); diff --git a/frontend/src/container/OrganizationSettings/AuthDomain/__tests__/CreateEdit.test.tsx b/frontend/src/container/OrganizationSettings/AuthDomain/__tests__/CreateEdit.test.tsx index 5d259515fc..8f4b894b85 100644 --- a/frontend/src/container/OrganizationSettings/AuthDomain/__tests__/CreateEdit.test.tsx +++ b/frontend/src/container/OrganizationSettings/AuthDomain/__tests__/CreateEdit.test.tsx @@ -46,16 +46,16 @@ describe('CreateEdit Modal', () => { // Tooltip mouseEnterDelay timers it triggers on the Configure button. fireEvent.click(configureButtons[0]); - expect( - await screen.findByText(/edit google authentication/i), - ).toBeInTheDocument(); + await expect( + screen.findByText(/edit google authentication/i), + ).resolves.toBeInTheDocument(); const backButton = screen.getByRole('button', { name: /back/i }); fireEvent.click(backButton); - expect( - await screen.findByText(/configure authentication method/i), - ).toBeInTheDocument(); + await expect( + screen.findByText(/configure authentication method/i), + ).resolves.toBeInTheDocument(); }); }); diff --git a/frontend/src/container/PipelinePage/Layouts/ChangeHistory/tests/ChangeHistory.test.tsx b/frontend/src/container/PipelinePage/Layouts/ChangeHistory/tests/ChangeHistory.test.tsx index 7b7869bf6c..36beb1acef 100644 --- a/frontend/src/container/PipelinePage/Layouts/ChangeHistory/tests/ChangeHistory.test.tsx +++ b/frontend/src/container/PipelinePage/Layouts/ChangeHistory/tests/ChangeHistory.test.tsx @@ -45,8 +45,8 @@ describe('ChangeHistory test', () => { ].forEach((text) => expect(getByText(text)).toBeInTheDocument()); // table content - 
expect(getAllByText('test-user').length).toBe(2); - expect(getAllByText('Deployment was successful').length).toBe(2); + expect(getAllByText('test-user')).toHaveLength(2); + expect(getAllByText('Deployment was successful')).toHaveLength(2); }); it('test deployment stage and icon based on history data', () => { @@ -88,6 +88,6 @@ describe('ChangeHistory test', () => { ).toBeInTheDocument(); expect(getByText('Unknown')).toBeInTheDocument(); - expect(container.querySelectorAll('.ant-table-row').length).toBe(5); + expect(container.querySelectorAll('.ant-table-row')).toHaveLength(5); }); }); diff --git a/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/FormFields/FilterInput/index.tsx b/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/FormFields/FilterInput/index.tsx index ec8b2a7fa4..99f02455fe 100644 --- a/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/FormFields/FilterInput/index.tsx +++ b/frontend/src/container/PipelinePage/PipelineListsView/AddNewPipeline/FormFields/FilterInput/index.tsx @@ -73,7 +73,7 @@ function FilterInput({ fieldData }: FilterInputProps): JSX.Element { name={fieldData.name} > {/* Antd form will supply value and onChange here. 
- // @ts-ignore */} + // @ts-expect-error */} diff --git a/frontend/src/container/PipelinePage/tests/PipelineListsView.test.tsx b/frontend/src/container/PipelinePage/tests/PipelineListsView.test.tsx index 8b5b2fa7a7..36184f6219 100644 --- a/frontend/src/container/PipelinePage/tests/PipelineListsView.test.tsx +++ b/frontend/src/container/PipelinePage/tests/PipelineListsView.test.tsx @@ -118,7 +118,7 @@ describe('PipelinePage container test', () => { ); // content assertion - expect(container.querySelectorAll('.ant-table-row').length).toBe(2); + expect(container.querySelectorAll('.ant-table-row')).toHaveLength(2); expect(getByText('Apache common parser')).toBeInTheDocument(); expect(getByText('source = nginx')).toBeInTheDocument(); @@ -142,14 +142,14 @@ describe('PipelinePage container test', () => { ); // content assertion - expect(document.querySelectorAll('[data-icon="edit"]').length).toBe(2); + expect(document.querySelectorAll('[data-icon="edit"]')).toHaveLength(2); expect(getByText('add_new_pipeline')).toBeInTheDocument(); // expand action const expandIcon = document.querySelectorAll( '.ant-table-row-expand-icon-cell > span[class*="anticon-right"]', ); - expect(expandIcon.length).toBe(2); + expect(expandIcon).toHaveLength(2); await fireEvent.click(expandIcon[0]); @@ -175,13 +175,13 @@ describe('PipelinePage container test', () => { ); // content assertion - expect(document.querySelectorAll('[data-icon="edit"]').length).toBe(2); + expect(document.querySelectorAll('[data-icon="edit"]')).toHaveLength(2); // expand action const expandIcon = document.querySelectorAll( '.ant-table-row-expand-icon-cell > span[class*="anticon-right"]', ); - expect(expandIcon.length).toBe(2); + expect(expandIcon).toHaveLength(2); await fireEvent.click(expandIcon[0]); const switchToggle = document.querySelector( @@ -196,7 +196,7 @@ describe('PipelinePage container test', () => { '.ant-table-expanded-row [data-icon="delete"]', ); - expect(deleteBtns.length).toBe(3); + 
expect(deleteBtns).toHaveLength(3); // delete pipeline await fireEvent.click(deleteBtns[0] as HTMLElement); @@ -215,9 +215,8 @@ describe('PipelinePage container test', () => { ); expect( - document.querySelectorAll('.ant-table-expanded-row [data-icon="delete"]') - .length, - ).toBe(2); + document.querySelectorAll('.ant-table-expanded-row [data-icon="delete"]'), + ).toHaveLength(2); }); it('should be able to toggle and delete pipeline', async () => { @@ -250,12 +249,12 @@ describe('PipelinePage container test', () => { const viewPipelineModal = document.querySelector('.ant-modal-wrap'); expect(viewPipelineModal).toBeInTheDocument(); - expect( - await findByText( + await expect( + findByText( viewPipelineModal as unknown as HTMLElement, 'Simulate Processing', ), - ).toBeInTheDocument(); + ).resolves.toBeInTheDocument(); await fireEvent.click( viewPipelineModal?.querySelector( @@ -282,7 +281,7 @@ describe('PipelinePage container test', () => { document.querySelector('.delete-pipeline-ok-text') as HTMLElement, ); - expect(document.querySelectorAll('[data-icon="delete"]').length).toBe(1); + expect(document.querySelectorAll('[data-icon="delete"]')).toHaveLength(1); const saveBtn = getByText('save_configuration'); expect(saveBtn).toBeInTheDocument(); @@ -305,13 +304,13 @@ describe('PipelinePage container test', () => { ); // content assertion - expect(document.querySelectorAll('[data-icon="edit"]').length).toBe(2); + expect(document.querySelectorAll('[data-icon="edit"]')).toHaveLength(2); // expand action const expandIcon = document.querySelectorAll( '.ant-table-row-expand-icon-cell > span[class*="anticon-right"]', ); - expect(expandIcon.length).toBe(2); + expect(expandIcon).toHaveLength(2); await fireEvent.click(expandIcon[0]); const editBtn = document.querySelectorAll('[data-icon="edit"]'); @@ -319,7 +318,7 @@ describe('PipelinePage container test', () => { await fireEvent.click(editBtn[0] as HTMLElement); // to have length 2 - expect(screen.queryAllByText('source = 
nginx').length).toBe(2); + expect(screen.queryAllByText('source = nginx')).toHaveLength(2); server.use( rest.get(attributeKeysURL, (_req, res, ctx) => diff --git a/frontend/src/container/PipelinePage/tests/utils.test.ts b/frontend/src/container/PipelinePage/tests/utils.test.ts index c21e8c5a4b..9e2caec476 100644 --- a/frontend/src/container/PipelinePage/tests/utils.test.ts +++ b/frontend/src/container/PipelinePage/tests/utils.test.ts @@ -12,53 +12,53 @@ import { } from '../PipelineListsView/utils'; describe('Utils testing of Pipeline Page', () => { - test('it should be check form field of add pipeline', () => { - expect(pipelineFields.length).toBe(3); + it('should be check form field of add pipeline', () => { + expect(pipelineFields).toHaveLength(3); expect(pipelineFields.length).toBeGreaterThan(1); }); - test('it should be check processor types field of add pipeline', () => { + it('should be check processor types field of add pipeline', () => { expect(processorTypes.length).toBeGreaterThan(1); }); - test('it should check form field of add processor', () => { + it('should check form field of add processor', () => { Object.keys(processorFields).forEach((key) => { expect(processorFields[key].length).toBeGreaterThan(1); }); }); - test('it should be check data length of pipeline', () => { - expect(pipelineMockData.length).toBe(2); + it('should be check data length of pipeline', () => { + expect(pipelineMockData).toHaveLength(2); expect(pipelineMockData.length).toBeGreaterThan(0); }); - test('it should be return filtered data and perform deletion', () => { + it('should be return filtered data and perform deletion', () => { const filterData = getElementFromArray( pipelineMockData, pipelineMockData[0], 'id', ); - expect(pipelineMockData).not.toEqual(filterData); - expect(pipelineMockData[0]).not.toEqual(filterData); + expect(pipelineMockData).not.toStrictEqual(filterData); + expect(pipelineMockData[0]).not.toStrictEqual(filterData); }); - test('it should be return index 
data and perform deletion', () => { + it('should be return index data and perform deletion', () => { const findRecordIndex = getRecordIndex( pipelineMockData, pipelineMockData[0], 'id', ); - expect(pipelineMockData).not.toEqual(findRecordIndex); - expect(pipelineMockData[0]).not.toEqual(findRecordIndex); + expect(pipelineMockData).not.toStrictEqual(findRecordIndex); + expect(pipelineMockData[0]).not.toStrictEqual(findRecordIndex); }); - test('it should be return modified column data', () => { + it('should be return modified column data', () => { const columnData = getTableColumn(processorColumns); - expect(processorColumns).not.toEqual(columnData); - expect(processorColumns.length).toEqual(columnData.length); + expect(processorColumns).not.toStrictEqual(columnData); + expect(processorColumns).toHaveLength(columnData.length); }); - test('it should be return modified column data', () => { + it('should be return modified column data', () => { const findRecordIndex = getRecordIndex( pipelineMockData, pipelineMockData[0], @@ -77,12 +77,12 @@ describe('Utils testing of Pipeline Page', () => { 'name', updatedPipelineData, ); - expect(pipelineMockData).not.toEqual(editedData); - expect(pipelineMockData.length).toEqual(editedData.length); - expect(pipelineMockData[0].name).not.toEqual(editedData[0].name); - expect(pipelineMockData[0].description).not.toEqual( + expect(pipelineMockData).not.toStrictEqual(editedData); + expect(pipelineMockData).toHaveLength(editedData.length); + expect(pipelineMockData[0].name).not.toStrictEqual(editedData[0].name); + expect(pipelineMockData[0].description).not.toStrictEqual( editedData[0].description, ); - expect(pipelineMockData[0].tags).not.toEqual(editedData[0].tags); + expect(pipelineMockData[0].tags).not.toStrictEqual(editedData[0].tags); }); }); diff --git a/frontend/src/container/QueryBuilder/components/RunQueryBtn/__test__/RunQueryBtn.test.tsx b/frontend/src/container/QueryBuilder/components/RunQueryBtn/__test__/RunQueryBtn.test.tsx 
index bcf8d3ccea..6e0cebd678 100644 --- a/frontend/src/container/QueryBuilder/components/RunQueryBtn/__test__/RunQueryBtn.test.tsx +++ b/frontend/src/container/QueryBuilder/components/RunQueryBtn/__test__/RunQueryBtn.test.tsx @@ -18,7 +18,7 @@ describe('RunQueryBtn', () => { ); }); - test('renders run state and triggers on click', async () => { + it('renders run state and triggers on click', async () => { const user = userEvent.setup(); const onRun = jest.fn(); const onCancel = jest.fn(); @@ -35,7 +35,7 @@ describe('RunQueryBtn', () => { expect(onRun).toHaveBeenCalledTimes(1); }); - test('shows cancel state and calls handleCancelQuery', async () => { + it('shows cancel state and calls handleCancelQuery', async () => { const user = userEvent.setup(); const onRun = jest.fn(); const onCancel = jest.fn(); @@ -51,19 +51,19 @@ describe('RunQueryBtn', () => { expect(onCancel).toHaveBeenCalledTimes(1); }); - test('disabled when disabled prop is true', () => { + it('disabled when disabled prop is true', () => { render(); expect(screen.getByRole('button', { name: /run query/i })).toBeDisabled(); }); - test('disabled when no props provided', () => { + it('disabled when no props provided', () => { render(); expect( screen.getByRole('button', { name: /run query/i }), ).toBeInTheDocument(); }); - test('shows Command + CornerDownLeft on mac', () => { + it('shows Command + CornerDownLeft on mac', () => { const { container } = render( { ).toBeInTheDocument(); }); - test('shows ChevronUp + CornerDownLeft on non-mac', () => { + it('shows ChevronUp + CornerDownLeft on non-mac', () => { (getUserOperatingSystem as jest.Mock).mockReturnValue( UserOperatingSystem.WINDOWS, ); @@ -95,7 +95,7 @@ describe('RunQueryBtn', () => { ).toBeInTheDocument(); }); - test('renders custom label when provided', () => { + it('renders custom label when provided', () => { render( { fireEvent.focus(select); await waitFor(() => expect(dataSourceCalls.length).toBeGreaterThanOrEqual(1)); - 
expect(dataSourceCalls[0]).toEqual('meter'); + expect(dataSourceCalls[0]).toBe('meter'); const input = select.querySelector('input') as HTMLInputElement; const user = userEvent.setup({ pointerEventsCheck: 0 }); @@ -94,12 +94,12 @@ describe('GroupByFilter', () => { await user.click(option); expect(onChange).toHaveBeenCalled(); - expect(dataSourceCalls[dataSourceCalls.length - 1]).toEqual('meter'); + expect(dataSourceCalls[dataSourceCalls.length - 1]).toBe('meter'); const emitted = onChange.mock.calls[0][0][0]; - expect(emitted.key).toEqual('custom.attr'); + expect(emitted.key).toBe('custom.attr'); expect(emitted.id).toBeTruthy(); - expect(emitted.id).not.toEqual('----'); + expect(emitted.id).not.toBe('----'); }); it('clicks suggested option and emits proper value', async () => { server.use( @@ -139,7 +139,7 @@ describe('GroupByFilter', () => { // Expect payload to be resolved to the exact attribute returned by MSW await waitFor(() => expect(onChange).toHaveBeenCalled()); const emitted = onChange.mock.calls[0][0][0]; - expect(emitted.key).toEqual('service.name'); - expect(emitted.id).toEqual('service.name--string--'); + expect(emitted.key).toBe('service.name'); + expect(emitted.id).toBe('service.name--string--'); }); }); diff --git a/frontend/src/container/QueryBuilder/filters/HavingFilter/__tests__/utils.test.tsx b/frontend/src/container/QueryBuilder/filters/HavingFilter/__tests__/utils.test.tsx index c32200abaf..987a5d7528 100644 --- a/frontend/src/container/QueryBuilder/filters/HavingFilter/__tests__/utils.test.tsx +++ b/frontend/src/container/QueryBuilder/filters/HavingFilter/__tests__/utils.test.tsx @@ -25,7 +25,7 @@ const valueWithAttributeAndOperator: IBuilderQuery = { }; describe('Having filter behaviour', () => { - test('Having filter render is rendered', () => { + it('Having filter render is rendered', () => { const mockFn = jest.fn(); const { unmount } = render( { unmount(); }); - test('Having render is disabled initially', () => { + it('Having render is 
disabled initially', () => { const mockFn = jest.fn(); const { unmount } = render( { unmount(); }); - test('Is having filter is enable', () => { + it('Is having filter is enable', () => { const mockFn = jest.fn(); const { unmount } = render( { unmount(); }); - test('Autocomplete in the having filter', async () => { + it('Autocomplete in the having filter', async () => { const onChange = jest.fn(); const user = userEvent.setup(); @@ -126,7 +126,7 @@ describe('Having filter behaviour', () => { // show operators after SUM(bytes) const operatorsOptions = screen.getAllByTitle(optionTestTitle); - expect(operatorsOptions.length).toEqual(HAVING_OPERATORS.length); + expect(operatorsOptions).toHaveLength(HAVING_OPERATORS.length); // show operators after SUM(bytes) when type from keyboard await user.clear(input); @@ -136,7 +136,7 @@ describe('Having filter behaviour', () => { // get filtered operators const filteredOperators = screen.getAllByTitle(optionTestTitle); - expect(filteredOperators.length).toEqual(1); + expect(filteredOperators).toHaveLength(1); // clear and show again all operators await user.clear(input); @@ -144,7 +144,7 @@ describe('Having filter behaviour', () => { const returnedOptions = screen.getAllByTitle(optionTestTitle); - expect(returnedOptions.length).toEqual(HAVING_OPERATORS.length); + expect(returnedOptions).toHaveLength(HAVING_OPERATORS.length); // check write value after operator await user.clear(input); diff --git a/frontend/src/container/QueryBuilder/filters/MetricNameSelector/MetricNameSelector.test.tsx b/frontend/src/container/QueryBuilder/filters/MetricNameSelector/MetricNameSelector.test.tsx index 4808db65b3..3548df1820 100644 --- a/frontend/src/container/QueryBuilder/filters/MetricNameSelector/MetricNameSelector.test.tsx +++ b/frontend/src/container/QueryBuilder/filters/MetricNameSelector/MetricNameSelector.test.tsx @@ -350,8 +350,11 @@ describe('selecting a metric type updates the aggregation options', () => { fireEvent.change(input, { 
target: { value: 'http_requests_total' } }); fireEvent.blur(input); - expect(getOptionLabels('time-agg-options')).toEqual(['Rate', 'Increase']); - expect(getOptionLabels('space-agg-options')).toEqual([ + expect(getOptionLabels('time-agg-options')).toStrictEqual([ + 'Rate', + 'Increase', + ]); + expect(getOptionLabels('space-agg-options')).toStrictEqual([ 'Sum', 'Avg', 'Min', @@ -373,7 +376,7 @@ describe('selecting a metric type updates the aggregation options', () => { fireEvent.change(input, { target: { value: 'cpu_usage_percent' } }); fireEvent.blur(input); - expect(getOptionLabels('time-agg-options')).toEqual([ + expect(getOptionLabels('time-agg-options')).toStrictEqual([ 'Latest', 'Sum', 'Avg', @@ -382,7 +385,7 @@ describe('selecting a metric type updates the aggregation options', () => { 'Count', 'Count Distinct', ]); - expect(getOptionLabels('space-agg-options')).toEqual([ + expect(getOptionLabels('space-agg-options')).toStrictEqual([ 'Sum', 'Avg', 'Min', @@ -407,7 +410,7 @@ describe('selecting a metric type updates the aggregation options', () => { }); fireEvent.blur(input); - expect(getOptionLabels('time-agg-options')).toEqual([ + expect(getOptionLabels('time-agg-options')).toStrictEqual([ 'Latest', 'Sum', 'Avg', @@ -416,7 +419,7 @@ describe('selecting a metric type updates the aggregation options', () => { 'Count', 'Count Distinct', ]); - expect(getOptionLabels('space-agg-options')).toEqual([ + expect(getOptionLabels('space-agg-options')).toStrictEqual([ 'Sum', 'Avg', 'Min', @@ -440,8 +443,8 @@ describe('selecting a metric type updates the aggregation options', () => { }); fireEvent.blur(input); - expect(getOptionLabels('time-agg-options')).toEqual([]); - expect(getOptionLabels('space-agg-options')).toEqual([ + expect(getOptionLabels('time-agg-options')).toStrictEqual([]); + expect(getOptionLabels('space-agg-options')).toStrictEqual([ 'P50', 'P75', 'P90', @@ -466,8 +469,8 @@ describe('selecting a metric type updates the aggregation options', () => { }); 
fireEvent.blur(input); - expect(getOptionLabels('time-agg-options')).toEqual([]); - expect(getOptionLabels('space-agg-options')).toEqual([ + expect(getOptionLabels('time-agg-options')).toStrictEqual([]); + expect(getOptionLabels('space-agg-options')).toStrictEqual([ 'P50', 'P75', 'P90', @@ -485,7 +488,7 @@ describe('selecting a metric type updates the aggregation options', () => { fireEvent.change(input, { target: { value: 'unknown_metric' } }); fireEvent.blur(input); - expect(getOptionLabels('time-agg-options')).toEqual([ + expect(getOptionLabels('time-agg-options')).toStrictEqual([ 'Max', 'Min', 'Sum', @@ -494,7 +497,7 @@ describe('selecting a metric type updates the aggregation options', () => { 'Rate', 'Increase', ]); - expect(getOptionLabels('space-agg-options')).toEqual([ + expect(getOptionLabels('space-agg-options')).toStrictEqual([ 'Sum', 'Avg', 'Min', @@ -970,7 +973,7 @@ describe('Summary metric type is treated as Gauge', () => { }); fireEvent.blur(input); - expect(getOptionLabels('time-agg-options')).toEqual([ + expect(getOptionLabels('time-agg-options')).toStrictEqual([ 'Latest', 'Sum', 'Avg', @@ -979,7 +982,7 @@ describe('Summary metric type is treated as Gauge', () => { 'Count', 'Count Distinct', ]); - expect(getOptionLabels('space-agg-options')).toEqual([ + expect(getOptionLabels('space-agg-options')).toStrictEqual([ 'Sum', 'Avg', 'Min', diff --git a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearchV2/__test__/SpanScopeSelector.test.tsx b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearchV2/__test__/SpanScopeSelector.test.tsx index 404e721495..c804ac7ec3 100644 --- a/frontend/src/container/QueryBuilder/filters/QueryBuilderSearchV2/__test__/SpanScopeSelector.test.tsx +++ b/frontend/src/container/QueryBuilder/filters/QueryBuilderSearchV2/__test__/SpanScopeSelector.test.tsx @@ -203,7 +203,7 @@ describe('SpanScopeSelector', () => { createSpanScopeFilter(filterKey), ]); renderWithContext(queryWithFilter, undefined, 
defaultQueryBuilderQuery); - expect(await screen.findByText(expectedText)).toBeInTheDocument(); + await expect(screen.findByText(expectedText)).resolves.toBeInTheDocument(); }, ); }); @@ -238,17 +238,17 @@ describe('SpanScopeSelector', () => { ); if (expectedScopeKey) { - expect(scopeFiltersInPayload.length).toBe(1); + expect(scopeFiltersInPayload).toHaveLength(1); expect(scopeFiltersInPayload[0].key?.key).toBe(expectedScopeKey); expect(scopeFiltersInPayload[0].value).toBe('true'); expect(scopeFiltersInPayload[0].op).toBe('='); } else { - expect(scopeFiltersInPayload.length).toBe(0); + expect(scopeFiltersInPayload).toHaveLength(0); } const expectedTotalFilters = expectedNonScopeItems.length + (expectedScopeKey ? 1 : 0); - expect(items.length).toBe(expectedTotalFilters); + expect(items).toHaveLength(expectedTotalFilters); }; beforeEach(() => { @@ -259,19 +259,21 @@ describe('SpanScopeSelector', () => { it('should initialize with ALL_SPANS if query prop has no scope filters', async () => { const localQuery = createLocalQuery(); renderWithContext(defaultQuery, mockOnChange, localQuery); - expect(await screen.findByText('All Spans')).toBeInTheDocument(); + await expect(screen.findByText('All Spans')).resolves.toBeInTheDocument(); }); it('should initialize with ROOT_SPANS if query prop has isRoot filter', async () => { const localQuery = createLocalQuery([createSpanScopeFilter('isRoot')]); renderWithContext(defaultQuery, mockOnChange, localQuery); - expect(await screen.findByText('Root Spans')).toBeInTheDocument(); + await expect(screen.findByText('Root Spans')).resolves.toBeInTheDocument(); }); it('should initialize with ENTRYPOINT_SPANS if query prop has isEntryPoint filter', async () => { const localQuery = createLocalQuery([createSpanScopeFilter('isEntryPoint')]); renderWithContext(defaultQuery, mockOnChange, localQuery); - expect(await screen.findByText('Entrypoint Spans')).toBeInTheDocument(); + await expect( + screen.findByText('Entrypoint Spans'), + 
).resolves.toBeInTheDocument(); }); it('should call onChange and not redirect when selecting ROOT_SPANS (from ALL_SPANS)', async () => { @@ -282,7 +284,7 @@ describe('SpanScopeSelector', () => { localQuery, true, ); - expect(await screen.findByText('All Spans')).toBeInTheDocument(); + await expect(screen.findByText('All Spans')).resolves.toBeInTheDocument(); await selectOption('Root Spans'); @@ -302,7 +304,7 @@ describe('SpanScopeSelector', () => { localQuery, true, ); - expect(await screen.findByText('Root Spans')).toBeInTheDocument(); + await expect(screen.findByText('Root Spans')).resolves.toBeInTheDocument(); await selectOption('All Spans'); @@ -323,7 +325,7 @@ describe('SpanScopeSelector', () => { localQuery, true, ); - expect(await screen.findByText('Root Spans')).toBeInTheDocument(); + await expect(screen.findByText('Root Spans')).resolves.toBeInTheDocument(); await selectOption('Entrypoint Spans'); @@ -345,7 +347,7 @@ describe('SpanScopeSelector', () => { localQuery, true, ); - expect(await screen.findByText('Root Spans')).toBeInTheDocument(); + await expect(screen.findByText('Root Spans')).resolves.toBeInTheDocument(); await selectOption('Entrypoint Spans'); @@ -372,7 +374,9 @@ describe('SpanScopeSelector', () => { localQuery, true, ); - expect(await screen.findByText('Entrypoint Spans')).toBeInTheDocument(); + await expect( + screen.findByText('Entrypoint Spans'), + ).resolves.toBeInTheDocument(); await selectOption('All Spans'); @@ -418,7 +422,7 @@ describe('SpanScopeSelector', () => { , ); - expect(await screen.findByText('All Spans')).toBeInTheDocument(); + await expect(screen.findByText('All Spans')).resolves.toBeInTheDocument(); await selectOption('Entrypoint Spans'); diff --git a/frontend/src/container/QueryTable/Drilldown/__tests__/Breakout.test.tsx b/frontend/src/container/QueryTable/Drilldown/__tests__/Breakout.test.tsx index fd69dfbc2e..2013fc9d2e 100644 --- a/frontend/src/container/QueryTable/Drilldown/__tests__/Breakout.test.tsx +++ 
b/frontend/src/container/QueryTable/Drilldown/__tests__/Breakout.test.tsx @@ -236,10 +236,10 @@ describe('TableDrilldown Breakout Functionality', () => { // Verify that the groupBy has been updated to only contain the selected breakout option expect(aggregateQueryData.groupBy).toHaveLength(1); - expect(aggregateQueryData.groupBy[0].key).toEqual('deployment.environment'); + expect(aggregateQueryData.groupBy[0].key).toBe('deployment.environment'); // Verify that orderBy has been cleared (as per getBreakoutQuery logic) - expect(aggregateQueryData.orderBy).toEqual([]); + expect(aggregateQueryData.orderBy).toStrictEqual([]); // Verify that the legend has been updated (check the actual value being returned) // The legend logic in getBreakoutQuery: legend: item.legend && groupBy.key ? `{{${groupBy.key}}}` : '' @@ -247,7 +247,7 @@ describe('TableDrilldown Breakout Functionality', () => { expect(aggregateQueryData.legend).toBeDefined(); // Check that the queryParams contain the expandedWidgetId - expect(queryParams).toEqual({ + expect(queryParams).toStrictEqual({ expandedWidgetId: 'test-widget', graphType: 'graph', }); diff --git a/frontend/src/container/QueryTable/Drilldown/__tests__/TableDrilldown.test.tsx b/frontend/src/container/QueryTable/Drilldown/__tests__/TableDrilldown.test.tsx index f19ca41edb..8797de2c00 100644 --- a/frontend/src/container/QueryTable/Drilldown/__tests__/TableDrilldown.test.tsx +++ b/frontend/src/container/QueryTable/Drilldown/__tests__/TableDrilldown.test.tsx @@ -206,7 +206,7 @@ describe('TableDrilldown', () => { expect(firstQueryData.filters).toBeDefined(); // Check that newTab option is set to true - expect(options).toEqual({ newTab: true }); + expect(options).toStrictEqual({ newTab: true }); }); it('should include timestamps in logs explorer URL when "View in Logs" is clicked', (): void => { @@ -278,10 +278,12 @@ describe('TableDrilldown', () => { // Check that the query contains the correct filter expression // The filter should include the 
clicked data filters (service.name = 'adservice', trace_id = 'df2cfb0e57bb8736207689851478cd50') const firstQueryData = compositeQuery.builder.queryData[0]; - expect(firstQueryData.filter.expression).toEqual(MOCK_QUERY_WITH_FILTER); + expect(firstQueryData.filter.expression).toStrictEqual( + MOCK_QUERY_WITH_FILTER, + ); // Check that newTab option is set to true - expect(options).toEqual({ newTab: true }); + expect(options).toStrictEqual({ newTab: true }); }); it('should include timestamps in traces explorer URL when "View in Traces" is clicked', (): void => { @@ -358,7 +360,7 @@ describe('TableDrilldown', () => { ); // Check that the queryParams contain the expandedWidgetId - expect(queryParams).toEqual({ expandedWidgetId: 'test-widget' }); + expect(queryParams).toStrictEqual({ expandedWidgetId: 'test-widget' }); // Check that newTab is true expect(newTab).toBe(true); diff --git a/frontend/src/container/QueryTable/__test__/QueryTable.test.tsx b/frontend/src/container/QueryTable/__test__/QueryTable.test.tsx index 1c8b94f31b..53fedb473a 100644 --- a/frontend/src/container/QueryTable/__test__/QueryTable.test.tsx +++ b/frontend/src/container/QueryTable/__test__/QueryTable.test.tsx @@ -32,7 +32,7 @@ describe('QueryTable -', () => { it('should render correctly with all the data rows', () => { const { container } = render(); const tableRows = container.querySelectorAll('tr.ant-table-row'); - expect(tableRows.length).toBe(QueryTableProps.queryTableData.rows.length); + expect(tableRows).toHaveLength(QueryTableProps.queryTableData.rows.length); }); it('should render correctly with searchTerm', () => { @@ -40,7 +40,7 @@ describe('QueryTable -', () => { , ); const tableRows = container.querySelectorAll('tr.ant-table-row'); - expect(tableRows.length).toBe(3); + expect(tableRows).toHaveLength(3); }); }); diff --git a/frontend/src/container/RolesSettings/RoleDetails/__tests__/RoleDetailsPage.test.tsx 
b/frontend/src/container/RolesSettings/RoleDetails/__tests__/RoleDetailsPage.test.tsx index f17f470485..3761cc7076 100644 --- a/frontend/src/container/RolesSettings/RoleDetails/__tests__/RoleDetailsPage.test.tsx +++ b/frontend/src/container/RolesSettings/RoleDetails/__tests__/RoleDetailsPage.test.tsx @@ -65,7 +65,9 @@ describe.skip('RoleDetailsPage', () => { initialRoute: `/settings/roles/${CUSTOM_ROLE_ID}`, }); - expect(await screen.findByText('Role — billing-manager')).toBeInTheDocument(); + await expect( + screen.findByText('Role — billing-manager'), + ).resolves.toBeInTheDocument(); // Tab navigation expect(screen.getByText('Overview')).toBeInTheDocument(); @@ -96,7 +98,9 @@ describe.skip('RoleDetailsPage', () => { initialRoute: `/settings/roles/${MANAGED_ROLE_ID}`, }); - expect(await screen.findByText(/Role — signoz-admin/)).toBeInTheDocument(); + await expect( + screen.findByText(/Role — signoz-admin/), + ).resolves.toBeInTheDocument(); expect( screen.getByText( @@ -148,11 +152,11 @@ describe.skip('RoleDetailsPage', () => { // Open the edit modal await user.click(screen.getByRole('button', { name: /edit role details/i })); - expect( - await screen.findByText('Edit Role Details', { + await expect( + screen.findByText('Edit Role Details', { selector: '.ant-modal-title', }), - ).toBeInTheDocument(); + ).resolves.toBeInTheDocument(); // Name field is disabled in edit mode (role rename is not allowed) const nameInput = screen.getByPlaceholderText( @@ -182,7 +186,9 @@ describe.skip('RoleDetailsPage', () => { ).not.toBeInTheDocument(), ); - expect(await screen.findByText('Updated description')).toBeInTheDocument(); + await expect( + screen.findByText('Updated description'), + ).resolves.toBeInTheDocument(); }); it('delete flow: modal shows role name, DELETE called on confirm', async () => { @@ -206,9 +212,9 @@ describe.skip('RoleDetailsPage', () => { await user.click(screen.getByRole('button', { name: /delete role/i })); - expect( - await screen.findByText(/Are you 
sure you want to delete the role/), - ).toBeInTheDocument(); + await expect( + screen.findByText(/Are you sure you want to delete the role/), + ).resolves.toBeInTheDocument(); const dialog = await screen.findByRole('dialog'); await user.click( diff --git a/frontend/src/container/RolesSettings/__tests__/RolesSettings.test.tsx b/frontend/src/container/RolesSettings/__tests__/RolesSettings.test.tsx index a2364fd9e9..2ddef2f825 100644 --- a/frontend/src/container/RolesSettings/__tests__/RolesSettings.test.tsx +++ b/frontend/src/container/RolesSettings/__tests__/RolesSettings.test.tsx @@ -42,7 +42,7 @@ describe('RolesSettings', () => { render(); - expect(await screen.findByText('signoz-admin')).toBeInTheDocument(); + await expect(screen.findByText('signoz-admin')).resolves.toBeInTheDocument(); // Section headers expect(screen.getByText('Managed roles')).toBeInTheDocument(); @@ -76,14 +76,16 @@ describe('RolesSettings', () => { render(); - expect(await screen.findByText('signoz-admin')).toBeInTheDocument(); + await expect(screen.findByText('signoz-admin')).resolves.toBeInTheDocument(); const user = userEvent.setup({ pointerEventsCheck: 0 }); const searchInput = screen.getByPlaceholderText('Search for roles...'); await user.type(searchInput, 'billing'); - expect(await screen.findByText('billing-manager')).toBeInTheDocument(); + await expect( + screen.findByText('billing-manager'), + ).resolves.toBeInTheDocument(); expect(screen.queryByText('signoz-admin')).not.toBeInTheDocument(); expect(screen.queryByText('signoz-editor')).not.toBeInTheDocument(); expect(screen.queryByText('dashboard-creator')).not.toBeInTheDocument(); @@ -98,14 +100,14 @@ describe('RolesSettings', () => { render(); - expect(await screen.findByText('signoz-admin')).toBeInTheDocument(); + await expect(screen.findByText('signoz-admin')).resolves.toBeInTheDocument(); const user = userEvent.setup({ pointerEventsCheck: 0 }); const searchInput = screen.getByPlaceholderText('Search for roles...'); await 
user.type(searchInput, 'read-only'); - expect(await screen.findByText('signoz-viewer')).toBeInTheDocument(); + await expect(screen.findByText('signoz-viewer')).resolves.toBeInTheDocument(); expect(screen.queryByText('signoz-admin')).not.toBeInTheDocument(); expect(screen.queryByText('billing-manager')).not.toBeInTheDocument(); }); @@ -119,16 +121,16 @@ describe('RolesSettings', () => { render(); - expect(await screen.findByText('signoz-admin')).toBeInTheDocument(); + await expect(screen.findByText('signoz-admin')).resolves.toBeInTheDocument(); const user = userEvent.setup({ pointerEventsCheck: 0 }); const searchInput = screen.getByPlaceholderText('Search for roles...'); await user.type(searchInput, 'nonexistentrole'); - expect( - await screen.findByText('No roles match your search.'), - ).toBeInTheDocument(); + await expect( + screen.findByText('No roles match your search.'), + ).resolves.toBeInTheDocument(); }); it('shows loading skeleton while fetching', () => { @@ -163,7 +165,7 @@ describe('RolesSettings', () => { render(); - expect(await screen.findByText(errorMessage)).toBeInTheDocument(); + await expect(screen.findByText(errorMessage)).resolves.toBeInTheDocument(); }); it('shows empty state when API returns no roles', async () => { @@ -175,7 +177,9 @@ describe('RolesSettings', () => { render(); - expect(await screen.findByText('No roles found.')).toBeInTheDocument(); + await expect( + screen.findByText('No roles found.'), + ).resolves.toBeInTheDocument(); }); it('renders descriptions for all roles', async () => { @@ -187,7 +191,7 @@ describe('RolesSettings', () => { render(); - expect(await screen.findByText('signoz-admin')).toBeInTheDocument(); + await expect(screen.findByText('signoz-admin')).resolves.toBeInTheDocument(); for (const role of allRoles) { if (role.description) { @@ -221,7 +225,9 @@ describe('RolesSettings', () => { render(); - expect(await screen.findByText('invalid-date-role')).toBeInTheDocument(); + await expect( + 
screen.findByText('invalid-date-role'), + ).resolves.toBeInTheDocument(); // Verify the "—" (em-dash) fallback is shown for both cells const dashFallback = screen.getAllByText('—'); diff --git a/frontend/src/container/RolesSettings/__tests__/utils.test.ts b/frontend/src/container/RolesSettings/__tests__/utils.test.ts index de2ffeb69d..8f297842cd 100644 --- a/frontend/src/container/RolesSettings/__tests__/utils.test.ts +++ b/frontend/src/container/RolesSettings/__tests__/utils.test.ts @@ -81,7 +81,7 @@ describe('buildPatchPayload', () => { authzRes: baseAuthzResources, }); - expect(result.additions).toEqual([ + expect(result.additions).toStrictEqual([ { resource: dashboardResource, selectors: [ID_B] }, ]); expect(result.deletions).toBeNull(); @@ -110,7 +110,7 @@ describe('buildPatchPayload', () => { authzRes: baseAuthzResources, }); - expect(result.deletions).toEqual([ + expect(result.deletions).toStrictEqual([ { resource: dashboardResource, selectors: [ID_B] }, ]); expect(result.additions).toBeNull(); @@ -163,10 +163,10 @@ describe('buildPatchPayload', () => { authzRes: baseAuthzResources, }); - expect(result.deletions).toEqual([ + expect(result.deletions).toStrictEqual([ { resource: dashboardResource, selectors: ['*'] }, ]); - expect(result.additions).toEqual([ + expect(result.additions).toStrictEqual([ { resource: dashboardResource, selectors: [ID_A, ID_B] }, ]); }); @@ -188,7 +188,7 @@ describe('buildPatchPayload', () => { authzRes: baseAuthzResources, }); - expect(result.deletions).toEqual([ + expect(result.deletions).toStrictEqual([ { resource: dashboardResource, selectors: ['*'] }, ]); expect(result.additions).toBeNull(); @@ -211,7 +211,7 @@ describe('buildPatchPayload', () => { authzRes: baseAuthzResources, }); - expect(result.additions).toEqual([ + expect(result.additions).toStrictEqual([ { resource: alertResource, selectors: [ID_B] }, ]); expect(result.deletions).toBeNull(); @@ -226,7 +226,7 @@ describe('objectsToPermissionConfig', () => { const result = 
objectsToPermissionConfig(objects, resourceDefs); - expect(result.dashboard).toEqual({ + expect(result.dashboard).toStrictEqual({ scope: PermissionScope.ALL, selectedIds: [], }); @@ -239,7 +239,7 @@ describe('objectsToPermissionConfig', () => { const result = objectsToPermissionConfig(objects, resourceDefs); - expect(result.dashboard).toEqual({ + expect(result.dashboard).toStrictEqual({ scope: PermissionScope.ONLY_SELECTED, selectedIds: [ID_A, ID_B], }); @@ -248,11 +248,11 @@ describe('objectsToPermissionConfig', () => { it('defaults to ONLY_SELECTED with empty selectedIds when resource is absent from API response', () => { const result = objectsToPermissionConfig([], resourceDefs); - expect(result.dashboard).toEqual({ + expect(result.dashboard).toStrictEqual({ scope: PermissionScope.ONLY_SELECTED, selectedIds: [], }); - expect(result.alert).toEqual({ + expect(result.alert).toStrictEqual({ scope: PermissionScope.ONLY_SELECTED, selectedIds: [], }); @@ -321,18 +321,18 @@ describe('buildConfig', () => { const result = buildConfig(resourceDefs, initial); - expect(result.dashboard).toEqual({ + expect(result.dashboard).toStrictEqual({ scope: PermissionScope.ALL, selectedIds: [], }); - expect(result.alert).toEqual(DEFAULT_RESOURCE_CONFIG); + expect(result.alert).toStrictEqual(DEFAULT_RESOURCE_CONFIG); }); it('applies DEFAULT_RESOURCE_CONFIG to all resources when no initial is provided', () => { const result = buildConfig(resourceDefs); - expect(result.dashboard).toEqual(DEFAULT_RESOURCE_CONFIG); - expect(result.alert).toEqual(DEFAULT_RESOURCE_CONFIG); + expect(result.dashboard).toStrictEqual(DEFAULT_RESOURCE_CONFIG); + expect(result.alert).toStrictEqual(DEFAULT_RESOURCE_CONFIG); }); }); @@ -347,14 +347,14 @@ describe('derivePermissionTypes', () => { const result = derivePermissionTypes(relations); expect(result).toHaveLength(3); - expect(result.map((p) => p.key)).toEqual(['create', 'read', 'delete']); + expect(result.map((p) => p.key)).toStrictEqual(['create', 'read', 
'delete']); expect(result[0].label).toBe('Create'); }); it('falls back to the default set of permission types when relations is null', () => { const result = derivePermissionTypes(null); - expect(result.map((p) => p.key)).toEqual([ + expect(result.map((p) => p.key)).toStrictEqual([ 'create', 'list', 'read', @@ -369,7 +369,7 @@ describe('deriveResourcesForRelation', () => { const result = deriveResourcesForRelation(baseAuthzResources, 'create'); expect(result).toHaveLength(2); - expect(result.map((r) => r.id)).toEqual(['dashboard', 'alert']); + expect(result.map((r) => r.id)).toStrictEqual(['dashboard', 'alert']); }); it('returns an empty array when authzResources is null', () => { diff --git a/frontend/src/container/RoutingPolicies/__tests__/useRoutingPolicies.test.tsx b/frontend/src/container/RoutingPolicies/__tests__/useRoutingPolicies.test.tsx index 586c3d99c7..ddb6aca94a 100644 --- a/frontend/src/container/RoutingPolicies/__tests__/useRoutingPolicies.test.tsx +++ b/frontend/src/container/RoutingPolicies/__tests__/useRoutingPolicies.test.tsx @@ -125,7 +125,7 @@ describe('useRoutingPolicies', () => { expect(result.current.searchTerm).toBe(''); expect(result.current.routingPoliciesData).toHaveLength(2); - expect(result.current.routingPoliciesData).toEqual( + expect(result.current.routingPoliciesData).toStrictEqual( expect.arrayContaining([ expect.objectContaining({ name: MOCK_ROUTING_POLICY_1.name }), expect.objectContaining({ name: MOCK_ROUTING_POLICY_2.name }), diff --git a/frontend/src/container/ServiceAccountsSettings/__tests__/ServiceAccountsSettings.integration.test.tsx b/frontend/src/container/ServiceAccountsSettings/__tests__/ServiceAccountsSettings.integration.test.tsx index 76bfbe3cc5..5f36aba704 100644 --- a/frontend/src/container/ServiceAccountsSettings/__tests__/ServiceAccountsSettings.integration.test.tsx +++ b/frontend/src/container/ServiceAccountsSettings/__tests__/ServiceAccountsSettings.integration.test.tsx @@ -174,9 +174,9 @@ 
describe('ServiceAccountsSettings (integration)', () => { }), ); - expect( - await screen.findByRole('button', { name: /Delete Service Account/i }), - ).toBeInTheDocument(); + await expect( + screen.findByRole('button', { name: /Delete Service Account/i }), + ).resolves.toBeInTheDocument(); }); it('saving changes in the drawer refetches the list', async () => { @@ -250,10 +250,10 @@ describe('ServiceAccountsSettings (integration)', () => { , ); - expect( - await screen.findByText( + await expect( + screen.findByText( /An unexpected error occurred while fetching service accounts/i, ), - ).toBeInTheDocument(); + ).resolves.toBeInTheDocument(); }); }); diff --git a/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetrics.test.tsx b/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetrics.test.tsx index 4fa203ba67..5b333a35e0 100644 --- a/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetrics.test.tsx +++ b/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetrics.test.tsx @@ -22,7 +22,7 @@ describe('ServicesUsingMetrics', () => { jest.clearAllMocks(); }); - test('should render the ServicesUsingMetrics component', async () => { + it('should render the ServicesUsingMetrics component', async () => { // Mock successful API response mockUseGetTopLevelOperations.mockReturnValue({ data: { @@ -90,7 +90,7 @@ describe('ServicesUsingMetrics', () => { expect(screen.getByText(/error rate \(% of total\)/i)).toBeInTheDocument(); }); - test('should render the ServicesUsingMetrics component with loading', async () => { + it('should render the ServicesUsingMetrics component with loading', async () => { // Mock loading state mockUseGetTopLevelOperations.mockReturnValue({ data: undefined, @@ -124,7 +124,7 @@ describe('ServicesUsingMetrics', () => { expect(screen.getByLabelText(/loading/i)).toBeInTheDocument(); }); - test('should not render if the data is not present', async () => { + it('should not render if the data is not 
present', async () => { // Mock successful API response with data mockUseGetTopLevelOperations.mockReturnValue({ data: { diff --git a/frontend/src/container/ServiceApplication/ServiceTraces/ServicTraces.test.tsx b/frontend/src/container/ServiceApplication/ServiceTraces/ServicTraces.test.tsx index fc6e97e4b4..e3865eae9c 100644 --- a/frontend/src/container/ServiceApplication/ServiceTraces/ServicTraces.test.tsx +++ b/frontend/src/container/ServiceApplication/ServiceTraces/ServicTraces.test.tsx @@ -3,7 +3,7 @@ import { act, fireEvent, render, screen } from 'tests/test-utils'; import ServiceTraces from '.'; describe('ServicesTraces', () => { - test('Should render the component', async () => { + it('Should render the component', async () => { await act(() => { render(); }); @@ -15,7 +15,7 @@ describe('ServicesTraces', () => { expect(errorRateHeader).toBeInTheDocument(); }); - test('Should render the Services with Services', async () => { + it('Should render the Services with Services', async () => { act(() => { render(); }); @@ -27,7 +27,7 @@ describe('ServicesTraces', () => { expect(p99Latency).toBeInTheDocument(); }); - test('Should click on p99 latency and sort the table', async () => { + it('Should click on p99 latency and sort the table', async () => { act(() => { render(); }); diff --git a/frontend/src/container/SideNav/helper.test.ts b/frontend/src/container/SideNav/helper.test.ts index ada551242e..f075f18427 100644 --- a/frontend/src/container/SideNav/helper.test.ts +++ b/frontend/src/container/SideNav/helper.test.ts @@ -9,7 +9,7 @@ describe('getQueryString', () => { const result = getQueryString(availableParams, params); - expect(result).toEqual(['param1=value1', 'param2=value2', '']); + expect(result).toStrictEqual(['param1=value1', 'param2=value2', '']); }); it('returns an array of empty strings if no matching parameters are found', () => { @@ -18,7 +18,7 @@ describe('getQueryString', () => { const result = getQueryString(availableParams, params); - 
expect(result).toEqual(['', '', '']); + expect(result).toStrictEqual(['', '', '']); }); it('returns an empty array if the available parameters list is empty', () => { @@ -29,6 +29,6 @@ describe('getQueryString', () => { const result = getQueryString(availableParams, params); - expect(result).toEqual([]); + expect(result).toStrictEqual([]); }); }); diff --git a/frontend/src/container/Trace/Graph/config.ts b/frontend/src/container/Trace/Graph/config.ts index 831825c477..c3e47476ea 100644 --- a/frontend/src/container/Trace/Graph/config.ts +++ b/frontend/src/container/Trace/Graph/config.ts @@ -71,7 +71,7 @@ export const getChartDataforGroupBy = ( const allGroupBy = Object.keys(items).map((e) => items[e].groupBy); keys(allGroupBy).forEach((e: string): void => { - // @ts-ignore + // @ts-expect-error const { length } = keys(allGroupBy[e]); if (length >= max) { diff --git a/frontend/src/container/Trace/TraceGraphFilter/utils.test.ts b/frontend/src/container/Trace/TraceGraphFilter/utils.test.ts index 24787194b3..fcfbf6bd04 100644 --- a/frontend/src/container/Trace/TraceGraphFilter/utils.test.ts +++ b/frontend/src/container/Trace/TraceGraphFilter/utils.test.ts @@ -15,13 +15,13 @@ describe('TraceGraphFilter/utils', () => { it('should return the correct value', () => { const selectedGroupBy = '1'; const result = selectedGroupByValue(selectedGroupBy, options); - expect(result).toEqual(selectedGroupBy); + expect(result).toStrictEqual(selectedGroupBy); }); it('should return the correct value when selectedOption not found', () => { const selectedGroupBy = '3'; const result = selectedGroupByValue(selectedGroupBy, options); - expect(result).toEqual(selectedGroupBy); + expect(result).toStrictEqual(selectedGroupBy); }); }); diff --git a/frontend/src/container/TraceDetail/utils.test.ts b/frontend/src/container/TraceDetail/utils.test.ts index afba7fa10f..5b4e5d8026 100644 --- a/frontend/src/container/TraceDetail/utils.test.ts +++ b/frontend/src/container/TraceDetail/utils.test.ts @@ 
-18,17 +18,17 @@ describe('traces/getTreeLevelsCount', () => { statusMessage: '', }); - test('should return 0 for empty tree', () => { + it('should return 0 for empty tree', () => { const emptyTree = null; expect(getTreeLevelsCount(emptyTree as unknown as ITraceTree)).toBe(0); }); - test('should return 1 for a tree with a single node', () => { + it('should return 1 for a tree with a single node', () => { const singleNodeTree = createNode('1'); expect(getTreeLevelsCount(singleNodeTree)).toBe(1); }); - test('should return correct depth for a balanced tree', () => { + it('should return correct depth for a balanced tree', () => { const tree = createNode('1', [ createNode('2', [createNode('4'), createNode('5')]), createNode('3', [createNode('6'), createNode('7')]), @@ -37,7 +37,7 @@ describe('traces/getTreeLevelsCount', () => { expect(getTreeLevelsCount(tree)).toBe(3); }); - test('should return correct depth for an unbalanced tree', () => { + it('should return correct depth for an unbalanced tree', () => { const tree = createNode('1', [ createNode('2', [ createNode('4', [createNode('8', [createNode('11')])]), @@ -49,7 +49,7 @@ describe('traces/getTreeLevelsCount', () => { expect(getTreeLevelsCount(tree)).toBe(5); }); - test('should return correct depth for a tree with single child nodes', () => { + it('should return correct depth for a tree with single child nodes', () => { const tree = createNode('1', [ createNode('2', [createNode('3', [createNode('4', [createNode('5')])])]), ]); diff --git a/frontend/src/container/TraceFlameGraph/__tests__/TraceFlameGraph.test.tsx b/frontend/src/container/TraceFlameGraph/__tests__/TraceFlameGraph.test.tsx index 143d0806fa..d681d47761 100644 --- a/frontend/src/container/TraceFlameGraph/__tests__/TraceFlameGraph.test.tsx +++ b/frontend/src/container/TraceFlameGraph/__tests__/TraceFlameGraph.test.tsx @@ -5,7 +5,7 @@ import { render, renderHook } from '@testing-library/react'; import TraceFlameGraph from 'container/TraceFlameGraph'; 
import store from 'store'; -test('loads and displays greeting', () => { +it('loads and displays greeting', () => { const { rerender } = renderHook(() => useState('')); const { asFragment } = render( diff --git a/frontend/src/container/TriggeredAlerts/__tests__/NoFilterTable.test.tsx b/frontend/src/container/TriggeredAlerts/__tests__/NoFilterTable.test.tsx index 18256fddb5..f3103f762e 100644 --- a/frontend/src/container/TriggeredAlerts/__tests__/NoFilterTable.test.tsx +++ b/frontend/src/container/TriggeredAlerts/__tests__/NoFilterTable.test.tsx @@ -67,7 +67,7 @@ describe('NoFilterTable', () => { if (severityHeader) { const initialRows = screen.getAllByRole('row'); - expect(initialRows.length).toBe(4); + expect(initialRows).toHaveLength(4); expect(initialRows[1]).toHaveTextContent('Alert B'); expect(initialRows[2]).toHaveTextContent('Alert C'); expect(initialRows[3]).toHaveTextContent('Alert A'); @@ -75,7 +75,7 @@ describe('NoFilterTable', () => { fireEvent.click(severityHeader); const sortedRows = screen.getAllByRole('row'); - expect(sortedRows.length).toBe(4); + expect(sortedRows).toHaveLength(4); expect(sortedRows[1]).toHaveTextContent('Alert A'); expect(sortedRows[2]).toHaveTextContent('Alert B'); expect(sortedRows[3]).toHaveTextContent('Alert C'); diff --git a/frontend/src/container/TriggeredAlerts/__tests__/utils.test.tsx b/frontend/src/container/TriggeredAlerts/__tests__/utils.test.tsx index e042bc9522..4863ba5563 100644 --- a/frontend/src/container/TriggeredAlerts/__tests__/utils.test.tsx +++ b/frontend/src/container/TriggeredAlerts/__tests__/utils.test.tsx @@ -29,7 +29,7 @@ describe('FilterAlerts', () => { const result = FilterAlerts(alerts, filters); - expect(result).toEqual([criticalAlert]); + expect(result).toStrictEqual([criticalAlert]); }); it('includes alerts when any filter matches', () => { @@ -54,7 +54,7 @@ describe('FilterAlerts', () => { const result = FilterAlerts(alerts, filters); expect(result).toHaveLength(2); - 
expect(result).toEqual([severityAlert, teamAlert]); + expect(result).toStrictEqual([severityAlert, teamAlert]); }); it('matches labels even when filters contain surrounding whitespace', () => { @@ -67,7 +67,7 @@ describe('FilterAlerts', () => { const result = FilterAlerts(alerts, filters); - expect(result).toEqual([alert]); + expect(result).toStrictEqual([alert]); }); it('ignores filters that do not contain a key/value delimiter', () => { @@ -80,6 +80,6 @@ describe('FilterAlerts', () => { const result = FilterAlerts(alerts, filters); - expect(result).toEqual([]); + expect(result).toStrictEqual([]); }); }); diff --git a/frontend/src/hooks/__tests__/useUrlQueryData.test.tsx b/frontend/src/hooks/__tests__/useUrlQueryData.test.tsx index 1f9d146ec7..fb8909e28a 100644 --- a/frontend/src/hooks/__tests__/useUrlQueryData.test.tsx +++ b/frontend/src/hooks/__tests__/useUrlQueryData.test.tsx @@ -42,35 +42,35 @@ describe('useUrlQueryData', () => { }; describe('query parsing', () => { - test('should parse valid JSON query parameter', () => { + it('should parse valid JSON query parameter', () => { const testData = { name: 'test', value: 123 }; const { result } = renderHookWithRouter('testKey', {}, [ `/test?testKey=${encodeURIComponent(JSON.stringify(testData))}`, ]); expect(result.current.query).toBe(JSON.stringify(testData)); - expect(result.current.queryData).toEqual(testData); + expect(result.current.queryData).toStrictEqual(testData); }); - test('should return default data when query parameter is not present', () => { + it('should return default data when query parameter is not present', () => { const defaultData = { default: 'value' }; const { result } = renderHookWithRouter('testKey', defaultData); expect(result.current.query).toBeNull(); - expect(result.current.queryData).toEqual(defaultData); + expect(result.current.queryData).toStrictEqual(defaultData); }); - test('should return default data when query parameter is empty', () => { + it('should return default data when 
query parameter is empty', () => { const defaultData = { default: 'value' }; const { result } = renderHookWithRouter('testKey', defaultData, [ '/test?testKey=', ]); expect(result.current.query).toBe(''); - expect(result.current.queryData).toEqual(defaultData); + expect(result.current.queryData).toStrictEqual(defaultData); }); - test('should handle invalid JSON and return default data', () => { + it('should handle invalid JSON and return default data', () => { const defaultData = { default: 'value' }; const consoleSpy = jest.spyOn(console, 'warn').mockImplementation(); @@ -79,7 +79,7 @@ describe('useUrlQueryData', () => { ]); expect(result.current.query).toBe('invalid-json'); - expect(result.current.queryData).toEqual(defaultData); + expect(result.current.queryData).toStrictEqual(defaultData); expect(consoleSpy).toHaveBeenCalledWith( 'Failed to parse query as JSON:', 'invalid-json', @@ -89,7 +89,7 @@ describe('useUrlQueryData', () => { consoleSpy.mockRestore(); }); - test('should handle malformed JSON and return default data', () => { + it('should handle malformed JSON and return default data', () => { const defaultData = { default: 'value' }; const consoleSpy = jest.spyOn(console, 'warn').mockImplementation(); @@ -100,13 +100,13 @@ describe('useUrlQueryData', () => { ); expect(result.current.query).toBe('{"name":"test",}'); - expect(result.current.queryData).toEqual(defaultData); + expect(result.current.queryData).toStrictEqual(defaultData); expect(consoleSpy).toHaveBeenCalled(); consoleSpy.mockRestore(); }); - test('should handle complex nested objects', () => { + it('should handle complex nested objects', () => { const complexData = { users: [ { id: 1, name: 'John', settings: { theme: 'dark', notifications: true } }, @@ -127,10 +127,10 @@ describe('useUrlQueryData', () => { ]); expect(result.current.query).toBe(JSON.stringify(complexData)); - expect(result.current.queryData).toEqual(complexData); + expect(result.current.queryData).toStrictEqual(complexData); }); 
- test('should handle primitive values', () => { + it('should handle primitive values', () => { const stringData = 'simple string'; const { result } = renderHookWithRouter('stringKey', '', [ `/test?stringKey=${encodeURIComponent(JSON.stringify(stringData))}`, @@ -142,7 +142,7 @@ describe('useUrlQueryData', () => { }); describe('redirectWithQuery', () => { - test('should navigate with new query data', () => { + it('should navigate with new query data', () => { const { result } = renderHookWithRouter('testKey', {}); const newData = { name: 'new', value: 456 }; @@ -159,7 +159,7 @@ describe('useUrlQueryData', () => { expect(urlParams.get('testKey')).toBe(JSON.stringify(newData)); }); - test('should preserve existing query parameters when adding new one', () => { + it('should preserve existing query parameters when adding new one', () => { const { result } = renderHookWithRouter('newKey', {}, [ '/test?existingKey=existingValue', ]); @@ -176,7 +176,7 @@ describe('useUrlQueryData', () => { expect(urlParams.get('newKey')).toBe(JSON.stringify(newData)); }); - test('should update existing query parameter', () => { + it('should update existing query parameter', () => { const initialData = { name: 'old' }; const { result } = renderHookWithRouter('testKey', {}, [ `/test?testKey=${encodeURIComponent(JSON.stringify(initialData))}`, @@ -192,7 +192,7 @@ describe('useUrlQueryData', () => { expect(urlParams.get('testKey')).toBe(JSON.stringify(newData)); }); - test('should handle complex data in redirectWithQuery', () => { + it('should handle complex data in redirectWithQuery', () => { const { result } = renderHookWithRouter('complexKey', {}); const complexData = { @@ -212,7 +212,7 @@ describe('useUrlQueryData', () => { expect(urlParams.get('complexKey')).toBe(JSON.stringify(complexData)); }); - test('should handle primitive values in redirectWithQuery', () => { + it('should handle primitive values in redirectWithQuery', () => { const { result } = renderHookWithRouter('primitiveKey', 
''); act(() => { @@ -224,7 +224,7 @@ describe('useUrlQueryData', () => { expect(urlParams.get('primitiveKey')).toBe(JSON.stringify('simple string')); }); - test('should handle null and undefined values', () => { + it('should handle null and undefined values', () => { const { result } = renderHookWithRouter('nullKey', {}); act(() => { @@ -246,7 +246,7 @@ describe('useUrlQueryData', () => { }); describe('hook interface', () => { - test('should return correct interface structure', () => { + it('should return correct interface structure', () => { const { result } = renderHookWithRouter('testKey', {}); expect(result.current).toHaveProperty('query'); @@ -255,7 +255,7 @@ describe('useUrlQueryData', () => { expect(typeof result.current.redirectWithQuery).toBe('function'); }); - test('should handle different query keys', () => { + it('should handle different query keys', () => { const { result: result1 } = renderHookWithRouter('key1', {}); const { result: result2 } = renderHookWithRouter('key2', {}); @@ -275,7 +275,7 @@ describe('useUrlQueryData', () => { }); describe('URL encoding/decoding', () => { - test('should handle URL encoded query parameters', () => { + it('should handle URL encoded query parameters', () => { const testData = { name: 'test with spaces', value: 'special&chars' }; const encodedData = encodeURIComponent(JSON.stringify(testData)); @@ -283,10 +283,10 @@ describe('useUrlQueryData', () => { `/test?testKey=${encodedData}`, ]); - expect(result.current.queryData).toEqual(testData); + expect(result.current.queryData).toStrictEqual(testData); }); - test('should properly encode data in redirectWithQuery', () => { + it('should properly encode data in redirectWithQuery', () => { const { result } = renderHookWithRouter('testKey', {}); const testData = { name: 'test with spaces', value: 'special&chars' }; @@ -299,7 +299,7 @@ describe('useUrlQueryData', () => { const decodedValue = JSON.parse( decodeURIComponent(urlParams.get('testKey') || ''), ); - 
expect(decodedValue).toEqual(testData); + expect(decodedValue).toStrictEqual(testData); }); }); }); diff --git a/frontend/src/hooks/dashboard/__test__/useDashboardsListQueryParams.test.ts b/frontend/src/hooks/dashboard/__test__/useDashboardsListQueryParams.test.ts index aa2e3dcbb0..a51c969711 100644 --- a/frontend/src/hooks/dashboard/__test__/useDashboardsListQueryParams.test.ts +++ b/frontend/src/hooks/dashboard/__test__/useDashboardsListQueryParams.test.ts @@ -43,7 +43,7 @@ describe('useDashboardsListQueryParams', () => { wrapper: createTestWrapper('/dashboard'), }); - expect(result.current.dashboardsListQueryParams).toEqual({ + expect(result.current.dashboardsListQueryParams).toStrictEqual({ columnKey: 'updatedAt', order: 'descend', page: '1', @@ -58,7 +58,7 @@ describe('useDashboardsListQueryParams', () => { ), }); - expect(result.current.dashboardsListQueryParams).toEqual({ + expect(result.current.dashboardsListQueryParams).toStrictEqual({ columnKey: 'createdAt', order: 'ascend', page: '3', @@ -116,7 +116,7 @@ describe('useDashboardsListQueryParams', () => { result.current.updateDashboardsListQueryParams(updated); }); - expect(result.current.dashboardsListQueryParams).toEqual(updated); + expect(result.current.dashboardsListQueryParams).toStrictEqual(updated); }); it('does not update state when params are identical', () => { @@ -181,7 +181,7 @@ describe('useDashboardsListQueryParams', () => { const storedParams = Object.fromEntries( new URLSearchParams(stored || '').entries(), ); - expect(storedParams).toEqual(updated); + expect(storedParams).toStrictEqual(updated); }); it('still calls safeNavigate even when params are unchanged', () => { diff --git a/frontend/src/hooks/dashboard/__test__/useTransformDashboardVariables.test.tsx b/frontend/src/hooks/dashboard/__test__/useTransformDashboardVariables.test.tsx index 01e2615123..d23a733959 100644 --- a/frontend/src/hooks/dashboard/__test__/useTransformDashboardVariables.test.tsx +++ 
b/frontend/src/hooks/dashboard/__test__/useTransformDashboardVariables.test.tsx @@ -292,7 +292,7 @@ describe('useTransformDashboardVariables', () => { const result = transformDashboardVariables(dashboard); - expect(result.data.variables.v1.selectedValue).toEqual(['prod']); + expect(result.data.variables.v1.selectedValue).toStrictEqual(['prod']); }); it('looks up URL variable by variable id when name is absent', () => { @@ -317,7 +317,7 @@ describe('useTransformDashboardVariables', () => { const result = transformDashboardVariables(dashboard); - expect(result.data.variables).toEqual({}); + expect(result.data.variables).toStrictEqual({}); }); it('does not mutate the original dashboard', () => { diff --git a/frontend/src/hooks/dashboard/__test__/useVariablesFromUrl.test.tsx b/frontend/src/hooks/dashboard/__test__/useVariablesFromUrl.test.tsx index 32dd6cd18c..40b8300fd7 100644 --- a/frontend/src/hooks/dashboard/__test__/useVariablesFromUrl.test.tsx +++ b/frontend/src/hooks/dashboard/__test__/useVariablesFromUrl.test.tsx @@ -20,7 +20,7 @@ describe('useVariablesFromUrl', () => { ), }); - expect(result.current.getUrlVariables()).toEqual({}); + expect(result.current.getUrlVariables()).toStrictEqual({}); }); it('should correctly parse variables from URL', () => { @@ -41,7 +41,7 @@ describe('useVariablesFromUrl', () => { ), }); - expect(result.current.getUrlVariables()).toEqual(mockVariables); + expect(result.current.getUrlVariables()).toStrictEqual(mockVariables); }); it('should handle malformed URL parameters gracefully', () => { @@ -56,7 +56,7 @@ describe('useVariablesFromUrl', () => { }); // Should return empty object when JSON parsing fails - expect(result.current.getUrlVariables()).toEqual({}); + expect(result.current.getUrlVariables()).toStrictEqual({}); }); it('should set variables to URL correctly', () => { @@ -84,7 +84,7 @@ describe('useVariablesFromUrl', () => { const urlVariables = searchParams.get(QueryParams.variables); expect(urlVariables).toBeTruthy(); - 
expect(JSON.parse(decodeURIComponent(urlVariables || ''))).toEqual( + expect(JSON.parse(decodeURIComponent(urlVariables || ''))).toStrictEqual( mockVariables, ); }); @@ -140,8 +140,8 @@ describe('useVariablesFromUrl', () => { // Check if only the specified variable was updated const updatedVariables = result.current.getUrlVariables(); - expect(updatedVariables.var1).toEqual(newValue); - expect(updatedVariables.var2).toEqual(initialVariables.var2); + expect(updatedVariables.var1).toStrictEqual(newValue); + expect(updatedVariables.var2).toStrictEqual(initialVariables.var2); }); it('should preserve other URL parameters when updating variables', () => { @@ -194,8 +194,8 @@ describe('useVariablesFromUrl', () => { expect(urlVariables.stringVar).toBe('production'); expect(urlVariables.numberVar).toBe(123); expect(urlVariables.booleanVar).toBe(true); - expect(urlVariables.arrayVar).toEqual(['service1', 'service2']); - expect(urlVariables.mixedArrayVar).toEqual(['string', 456, false]); + expect(urlVariables.arrayVar).toStrictEqual(['service1', 'service2']); + expect(urlVariables.mixedArrayVar).toStrictEqual(['string', 456, false]); expect(urlVariables.nullVar).toBeNull(); }); @@ -221,8 +221,8 @@ describe('useVariablesFromUrl', () => { const urlVariables = result.current.getUrlVariables(); expect(urlVariables.emptyString).toBe(''); - expect(urlVariables.emptyArray).toEqual([]); - expect(urlVariables.singleItemArray).toEqual(['solo']); + expect(urlVariables.emptyArray).toStrictEqual([]); + expect(urlVariables.singleItemArray).toStrictEqual(['solo']); expect(urlVariables.undefinedVar).toBeUndefined(); }); @@ -243,7 +243,7 @@ describe('useVariablesFromUrl', () => { // Should parse successfully via the raw fallback, not throw or return {} const vars = result.current.getUrlVariables(); - expect(vars).toEqual({ threshold: '50%' }); + expect(vars).toStrictEqual({ threshold: '50%' }); }); it('should return empty object when URL param is completely unparseable', () => { @@ -258,7 
+258,7 @@ describe('useVariablesFromUrl', () => { ), }); - expect(result.current.getUrlVariables()).toEqual({}); + expect(result.current.getUrlVariables()).toStrictEqual({}); }); it('should update variables with array values correctly', () => { @@ -283,6 +283,6 @@ describe('useVariablesFromUrl', () => { }); const updatedVariables = result.current.getUrlVariables(); - expect(updatedVariables.multiSelectVar).toEqual(arrayValue); + expect(updatedVariables.multiSelectVar).toStrictEqual(arrayValue); }); }); diff --git a/frontend/src/hooks/queryBuilder/useIsValidTag.test.ts b/frontend/src/hooks/queryBuilder/useIsValidTag.test.ts index 47ab280e81..6797e7b34d 100644 --- a/frontend/src/hooks/queryBuilder/useIsValidTag.test.ts +++ b/frontend/src/hooks/queryBuilder/useIsValidTag.test.ts @@ -3,7 +3,7 @@ import { renderHook } from '@testing-library/react'; import { useIsValidTag } from './useIsValidTag'; describe('useIsValidTag', () => { - test('returns correct validation result for SINGLE_VALUE operator type', () => { + it('returns correct validation result for SINGLE_VALUE operator type', () => { const { result } = renderHook(() => useIsValidTag('SINGLE_VALUE', 1)); expect(result.current).toBe(true); @@ -13,7 +13,7 @@ describe('useIsValidTag', () => { expect(result2.current).toBe(false); }); - test('returns correct validation result for MULTIPLY_VALUE operator type', () => { + it('returns correct validation result for MULTIPLY_VALUE operator type', () => { const { result } = renderHook(() => useIsValidTag('MULTIPLY_VALUE', 1)); expect(result.current).toBe(true); @@ -23,7 +23,7 @@ describe('useIsValidTag', () => { expect(result2.current).toBe(false); }); - test('returns correct validation result for NON_VALUE operator type', () => { + it('returns correct validation result for NON_VALUE operator type', () => { const { result } = renderHook(() => useIsValidTag('NON_VALUE', 0)); expect(result.current).toBe(true); @@ -31,7 +31,7 @@ describe('useIsValidTag', () => { 
expect(result2.current).toBe(false); }); - test('returns correct validation result for NOT_VALID operator type', () => { + it('returns correct validation result for NOT_VALID operator type', () => { const { result } = renderHook(() => useIsValidTag('NOT_VALID', 1)); expect(result.current).toBe(false); diff --git a/frontend/src/hooks/useAuthZ/useAuthZ.test.tsx b/frontend/src/hooks/useAuthZ/useAuthZ.test.tsx index 2bacb87b8c..5d2198dd87 100644 --- a/frontend/src/hooks/useAuthZ/useAuthZ.test.tsx +++ b/frontend/src/hooks/useAuthZ/useAuthZ.test.tsx @@ -70,7 +70,7 @@ describe('useAuthZ', () => { }); expect(result.current.error).toBeNull(); - expect(result.current.permissions).toEqual(expectedResponse); + expect(result.current.permissions).toStrictEqual(expectedResponse); }); it('should handle API errors', async () => { @@ -134,7 +134,7 @@ describe('useAuthZ', () => { }); expect(requestCount).toBe(1); - expect(result.current.permissions).toEqual({ + expect(result.current.permissions).toStrictEqual({ [permission1]: { isGranted: true, }, @@ -147,7 +147,7 @@ describe('useAuthZ', () => { }); expect(requestCount).toBe(2); - expect(result.current.permissions).toEqual({ + expect(result.current.permissions).toStrictEqual({ [permission1]: { isGranted: true, }, @@ -213,7 +213,7 @@ describe('useAuthZ', () => { expect(result.current.isLoading).toBe(false); expect(result.current.error).toBeNull(); - expect(result.current.permissions).toEqual({}); + expect(result.current.permissions).toStrictEqual({}); }); it('should send correct payload format to API', async () => { @@ -317,13 +317,13 @@ describe('useAuthZ', () => { object: { resource: { name: 'dashboard' }, selector: '456' }, }); - expect(result1.current.permissions).toEqual({ + expect(result1.current.permissions).toStrictEqual({ [permission1]: { isGranted: true }, }); - expect(result2.current.permissions).toEqual({ + expect(result2.current.permissions).toStrictEqual({ [permission2]: { isGranted: false }, }); - 
expect(result3.current.permissions).toEqual({ + expect(result3.current.permissions).toStrictEqual({ [permission3]: { isGranted: true }, }); }); @@ -427,7 +427,7 @@ describe('useAuthZ', () => { expect(result.current.isLoading).toBe(false); }); - expect(result.current.permissions).toEqual({ + expect(result.current.permissions).toStrictEqual({ [permission1]: { isGranted: true }, [permission2]: { isGranted: false }, [permission3]: { isGranted: true }, @@ -466,7 +466,7 @@ describe('useAuthZ', () => { ); expect(requestCount).toBe(1); - expect(result1.current.permissions).toEqual({ + expect(result1.current.permissions).toStrictEqual({ [permission1]: { isGranted: true }, }); @@ -484,10 +484,10 @@ describe('useAuthZ', () => { ); expect(requestCount).toBe(2); - expect(result1.current.permissions).toEqual({ + expect(result1.current.permissions).toStrictEqual({ [permission1]: { isGranted: true }, }); - expect(result2.current.permissions).toEqual({ + expect(result2.current.permissions).toStrictEqual({ [permission2]: { isGranted: false }, }); expect(result1.current.permissions).not.toHaveProperty(permission2); diff --git a/frontend/src/hooks/useComponentPermission.test.ts b/frontend/src/hooks/useComponentPermission.test.ts index 17fcc5b921..d513835c97 100644 --- a/frontend/src/hooks/useComponentPermission.test.ts +++ b/frontend/src/hooks/useComponentPermission.test.ts @@ -45,7 +45,7 @@ describe('useComponentPermission', () => { true, // edit_widget true, // add_panel ]; - expect(result.current).toEqual(expectedResult); + expect(result.current).toStrictEqual(expectedResult); }); it('should return correct permissions for EDITOR role', () => { @@ -70,7 +70,7 @@ describe('useComponentPermission', () => { true, // edit_widget true, // add_panel ]; - expect(result.current).toEqual(expectedResult); + expect(result.current).toStrictEqual(expectedResult); }); it('should return correct permissions for VIEWER role', () => { @@ -95,6 +95,6 @@ describe('useComponentPermission', () => { 
false, // edit_widget false, // add_panel ]; - expect(result.current).toEqual(expectedResult); + expect(result.current).toStrictEqual(expectedResult); }); }); diff --git a/frontend/src/hooks/useGetQueryLabels.test.ts b/frontend/src/hooks/useGetQueryLabels.test.ts index 4e2dfcb53c..17000b30ef 100644 --- a/frontend/src/hooks/useGetQueryLabels.test.ts +++ b/frontend/src/hooks/useGetQueryLabels.test.ts @@ -42,7 +42,7 @@ describe('useGetQueryLabels', () => { const { result } = renderHook(() => useGetQueryLabels(query)); - expect(result.current).toEqual([]); + expect(result.current).toStrictEqual([]); }); it('returns formula labels when queryFormulas is populated', () => { @@ -60,7 +60,7 @@ describe('useGetQueryLabels', () => { const { result } = renderHook(() => useGetQueryLabels(query)); - expect(result.current).toEqual([ + expect(result.current).toStrictEqual([ { label: 'F1', value: 'F1' }, { label: 'F2', value: 'F2' }, ]); @@ -76,7 +76,7 @@ describe('useGetQueryLabels', () => { const { result } = renderHook(() => useGetQueryLabels(query)); - expect(result.current).toEqual([]); + expect(result.current).toStrictEqual([]); }); it('returns labels from clickhouse_sql when populated', () => { @@ -90,7 +90,7 @@ describe('useGetQueryLabels', () => { const { result } = renderHook(() => useGetQueryLabels(query)); - expect(result.current).toEqual([ + expect(result.current).toStrictEqual([ { label: 'query_a', value: 'query_a' }, { label: 'query_b', value: 'query_b' }, ]); @@ -106,7 +106,7 @@ describe('useGetQueryLabels', () => { const { result } = renderHook(() => useGetQueryLabels(query)); - expect(result.current).toEqual([]); + expect(result.current).toStrictEqual([]); }); it('returns labels from promql when populated', () => { @@ -120,7 +120,7 @@ describe('useGetQueryLabels', () => { const { result } = renderHook(() => useGetQueryLabels(query)); - expect(result.current).toEqual([ + expect(result.current).toStrictEqual([ { label: 'prom_1', value: 'prom_1' }, { label: 'prom_2', 
value: 'prom_2' }, ]); diff --git a/frontend/src/hooks/useInterval.test.ts b/frontend/src/hooks/useInterval.test.ts index c6626b2224..7ab80c5ecc 100644 --- a/frontend/src/hooks/useInterval.test.ts +++ b/frontend/src/hooks/useInterval.test.ts @@ -5,7 +5,7 @@ import useInterval from './useInterval'; jest.useFakeTimers(); describe('useInterval', () => { - test('calls the callback with a given delay', () => { + it('calls the callback with a given delay', () => { const callback = jest.fn(); const delay = 1000; @@ -26,7 +26,7 @@ describe('useInterval', () => { expect(callback).toHaveBeenCalledTimes(2); }); - test('does not call the callback if not enabled', () => { + it('does not call the callback if not enabled', () => { const callback = jest.fn(); const delay = 1000; const enabled = false; @@ -40,7 +40,7 @@ describe('useInterval', () => { expect(callback).toHaveBeenCalledTimes(0); }); - test('cleans up the interval when unmounted', () => { + it('cleans up the interval when unmounted', () => { const callback = jest.fn(); const delay = 1000; @@ -61,7 +61,7 @@ describe('useInterval', () => { expect(callback).toHaveBeenCalledTimes(1); }); - test('updates the interval when delay changes', () => { + it('updates the interval when delay changes', () => { const callback = jest.fn(); const initialDelay = 1000; const newDelay = 2000; diff --git a/frontend/src/hooks/usePreviousValue.test.tsx b/frontend/src/hooks/usePreviousValue.test.tsx index efc3073af7..d3f5035abe 100644 --- a/frontend/src/hooks/usePreviousValue.test.tsx +++ b/frontend/src/hooks/usePreviousValue.test.tsx @@ -3,7 +3,7 @@ import { renderHook } from '@testing-library/react'; import usePreviousValue from './usePreviousValue'; describe('usePreviousValue', () => { - test('returns the previous value of a given variable', () => { + it('returns the previous value of a given variable', () => { const { result, rerender } = renderHook( ({ value }) => usePreviousValue(value), { @@ -23,7 +23,7 @@ describe('usePreviousValue', 
() => { expect(result.current).toBe(2); }); - test('works with different types of values', () => { + it('works with different types of values', () => { const { result, rerender } = renderHook( ({ value }) => usePreviousValue(value), { diff --git a/frontend/src/hooks/useResourceAttribute/__tests__/whitelistedKeys.test.ts b/frontend/src/hooks/useResourceAttribute/__tests__/whitelistedKeys.test.ts index ccb92f9b0e..6b4d90dc56 100644 --- a/frontend/src/hooks/useResourceAttribute/__tests__/whitelistedKeys.test.ts +++ b/frontend/src/hooks/useResourceAttribute/__tests__/whitelistedKeys.test.ts @@ -48,7 +48,7 @@ describe('useResourceAttribute config', () => { dotNotationFilters, ); expect(result).toHaveLength(3); - expect(result).toEqual(dotNotationFilters); + expect(result).toStrictEqual(dotNotationFilters); }); it('should keep underscore-notation filters on the Service Map route', () => { @@ -57,21 +57,21 @@ describe('useResourceAttribute config', () => { underscoreNotationFilters, ); expect(result).toHaveLength(3); - expect(result).toEqual(underscoreNotationFilters); + expect(result).toStrictEqual(underscoreNotationFilters); }); it('should filter out non-whitelisted keys on the Service Map route', () => { const allFilters = [...dotNotationFilters, ...nonWhitelistedFilters]; const result = mappingWithRoutesAndKeys(ROUTES.SERVICE_MAP, allFilters); expect(result).toHaveLength(3); - expect(result).toEqual(dotNotationFilters); + expect(result).toStrictEqual(dotNotationFilters); }); it('should return all filters on non-Service Map routes', () => { const allFilters = [...dotNotationFilters, ...nonWhitelistedFilters]; const result = mappingWithRoutesAndKeys('/services', allFilters); expect(result).toHaveLength(5); - expect(result).toEqual(allFilters); + expect(result).toStrictEqual(allFilters); }); }); }); diff --git a/frontend/src/hooks/useUrlQuery.test.tsx b/frontend/src/hooks/useUrlQuery.test.tsx index ac261580f2..f599420d29 100644 --- 
a/frontend/src/hooks/useUrlQuery.test.tsx +++ b/frontend/src/hooks/useUrlQuery.test.tsx @@ -5,7 +5,7 @@ import { createMemoryHistory } from 'history'; import useUrlQuery from './useUrlQuery'; describe('useUrlQuery', () => { - test('returns URLSearchParams object for the current URL search', () => { + it('returns URLSearchParams object for the current URL search', () => { const history = createMemoryHistory({ initialEntries: ['/test?param1=value1¶m2=value2'], }); @@ -18,7 +18,7 @@ describe('useUrlQuery', () => { expect(result.current.get('param2')).toBe('value2'); }); - test('updates URLSearchParams object when URL search changes', () => { + it('updates URLSearchParams object when URL search changes', () => { const history = createMemoryHistory({ initialEntries: ['/test?param1=value1'], }); @@ -28,7 +28,7 @@ describe('useUrlQuery', () => { }); expect(result.current.get('param1')).toBe('value1'); - expect(result.current.get('param2')).toBe(null); + expect(result.current.get('param2')).toBeNull(); act(() => { history.push('/test?param1=newValue1¶m2=value2'); @@ -40,7 +40,7 @@ describe('useUrlQuery', () => { expect(result.current.get('param2')).toBe('value2'); }); - test('returns empty URLSearchParams object when no query parameters are present', () => { + it('returns empty URLSearchParams object when no query parameters are present', () => { const history = createMemoryHistory({ initialEntries: ['/test'], }); @@ -50,7 +50,7 @@ describe('useUrlQuery', () => { }); expect(result.current.toString()).toBe(''); - expect(result.current.get('param1')).toBe(null); - expect(result.current.get('param2')).toBe(null); + expect(result.current.get('param1')).toBeNull(); + expect(result.current.get('param2')).toBeNull(); }); }); diff --git a/frontend/src/lib/__tests__/getStep.test.ts b/frontend/src/lib/__tests__/getStep.test.ts index c799bfa9e0..d0a04ac35a 100644 --- a/frontend/src/lib/__tests__/getStep.test.ts +++ b/frontend/src/lib/__tests__/getStep.test.ts @@ -2,7 +2,7 @@ import 
dayjs from 'dayjs'; import getStep, { DefaultStepSize, MaxDataPoints } from 'lib/getStep'; describe('lib/getStep', () => { - test('should return default step when the given range is less than 1 day', () => { + it('should return default step when the given range is less than 1 day', () => { const start = dayjs(); const end = start.add(1, 'hour'); const startUnix = start.valueOf(); @@ -14,7 +14,7 @@ describe('lib/getStep', () => { end: endUnix / 1e3, inputFormat: 's', }), - ).toEqual(DefaultStepSize); + ).toStrictEqual(DefaultStepSize); expect( getStep({ @@ -22,7 +22,7 @@ describe('lib/getStep', () => { end: endUnix, inputFormat: 'ms', }), - ).toEqual(DefaultStepSize); + ).toStrictEqual(DefaultStepSize); expect( getStep({ @@ -30,10 +30,10 @@ describe('lib/getStep', () => { end: endUnix * 1e6, inputFormat: 'ns', }), - ).toEqual(DefaultStepSize); + ).toStrictEqual(DefaultStepSize); }); - test('should return relevant step when the given range is greater than 1 day', () => { + it('should return relevant step when the given range is greater than 1 day', () => { const start = dayjs(); const end = start.add(1, 'day').add(1, 'second'); const startUnix = start.valueOf(); @@ -52,7 +52,7 @@ describe('lib/getStep', () => { end: endUnix / 1e3, inputFormat: 's', }), - ).toEqual(expectedStepSize); + ).toStrictEqual(expectedStepSize); expect( getStep({ @@ -60,7 +60,7 @@ describe('lib/getStep', () => { end: endUnix, inputFormat: 'ms', }), - ).toEqual(expectedStepSize); + ).toStrictEqual(expectedStepSize); expect( getStep({ @@ -68,6 +68,6 @@ describe('lib/getStep', () => { end: endUnix * 1e6, inputFormat: 'ns', }), - ).toEqual(expectedStepSize); + ).toStrictEqual(expectedStepSize); }); }); diff --git a/frontend/src/lib/__tests__/logql/parser.test.ts b/frontend/src/lib/__tests__/logql/parser.test.ts index 56f8bcb16a..38c0b9a4fb 100644 --- a/frontend/src/lib/__tests__/logql/parser.test.ts +++ b/frontend/src/lib/__tests__/logql/parser.test.ts @@ -2,9 +2,9 @@ import { logqlQueries } from 
'lib/__fixtures__/logql'; import parser from 'lib/logql/parser'; describe('lib/logql/parser', () => { - test('parse valid queries', () => { + it('parse valid queries', () => { logqlQueries.forEach((queryObject) => { - expect(parser(queryObject.query)).toEqual(queryObject.parsedQuery); + expect(parser(queryObject.query)).toStrictEqual(queryObject.parsedQuery); }); }); }); diff --git a/frontend/src/lib/__tests__/logql/reverseParser.test.ts b/frontend/src/lib/__tests__/logql/reverseParser.test.ts index 1d5d8a6145..69e6a52603 100644 --- a/frontend/src/lib/__tests__/logql/reverseParser.test.ts +++ b/frontend/src/lib/__tests__/logql/reverseParser.test.ts @@ -2,9 +2,11 @@ import { logqlQueries } from 'lib/__fixtures__/logql'; import { reverseParser } from 'lib/logql/reverseParser'; describe('lib/logql/reverseParser', () => { - test('reverse parse valid queries', () => { + it('reverse parse valid queries', () => { logqlQueries.forEach((queryObject) => { - expect(reverseParser(queryObject.parsedQuery)).toEqual(queryObject.query); + expect(reverseParser(queryObject.parsedQuery)).toStrictEqual( + queryObject.query, + ); }); }); }); diff --git a/frontend/src/lib/__tests__/logql/splitter.test.ts b/frontend/src/lib/__tests__/logql/splitter.test.ts index 4a9e2b4684..13900056f9 100644 --- a/frontend/src/lib/__tests__/logql/splitter.test.ts +++ b/frontend/src/lib/__tests__/logql/splitter.test.ts @@ -2,9 +2,9 @@ import { logqlQueries } from 'lib/__fixtures__/logql'; import { splitter } from 'lib/logql/splitter'; describe('lib/logql/splitter', () => { - test('splitter valid quereies', () => { + it('splitter valid queries', () => { logqlQueries.forEach((queryObject) => { - expect(splitter(queryObject.query)).toEqual(queryObject.splitterQuery); + expect(splitter(queryObject.query)).toStrictEqual(queryObject.splitterQuery); }); }); }); diff --git a/frontend/src/lib/dashboardVariables/variableReference.test.ts b/frontend/src/lib/dashboardVariables/variableReference.test.ts index
5a75b3ed51..6cd6339dd1 100644 --- a/frontend/src/lib/dashboardVariables/variableReference.test.ts +++ b/frontend/src/lib/dashboardVariables/variableReference.test.ts @@ -112,7 +112,7 @@ const baseQuery: Query = { describe('extractQueryTextStrings', () => { it('returns empty array for query builder with no data', () => { - expect(extractQueryTextStrings(baseQuery)).toEqual([]); + expect(extractQueryTextStrings(baseQuery)).toStrictEqual([]); }); it('extracts string values from query builder filter items', () => { @@ -137,7 +137,7 @@ describe('extractQueryTextStrings', () => { }; const texts = extractQueryTextStrings(query); - expect(texts).toEqual(['$service_name', 'hardcoded', '$env']); + expect(texts).toStrictEqual(['$service_name', 'hardcoded', '$env']); }); it('extracts filter expression from query builder', () => { @@ -157,7 +157,7 @@ describe('extractQueryTextStrings', () => { }; const texts = extractQueryTextStrings(query); - expect(texts).toEqual(['env = $deployment_environment']); + expect(texts).toStrictEqual(['env = $deployment_environment']); }); it('skips non-string filter values', () => { @@ -178,7 +178,7 @@ describe('extractQueryTextStrings', () => { }, }; - expect(extractQueryTextStrings(query)).toEqual([]); + expect(extractQueryTextStrings(query)).toStrictEqual([]); }); it('extracts promql query strings', () => { @@ -191,7 +191,7 @@ describe('extractQueryTextStrings', () => { ], }; - expect(extractQueryTextStrings(query)).toEqual([ + expect(extractQueryTextStrings(query)).toStrictEqual([ 'up{env="$env"}', 'cpu{ns="$namespace"}', ]); @@ -211,7 +211,7 @@ describe('extractQueryTextStrings', () => { ], }; - expect(extractQueryTextStrings(query)).toEqual([ + expect(extractQueryTextStrings(query)).toStrictEqual([ 'SELECT * WHERE env = {{.env}}', ]); }); @@ -240,7 +240,10 @@ describe('extractQueryTextStrings', () => { }, }; - expect(extractQueryTextStrings(query)).toEqual(['$env', '$service_name']); + expect(extractQueryTextStrings(query)).toStrictEqual([ + 
'$env', + '$service_name', + ]); }); it('collects both filter items and filter expression from the same queryData', () => { @@ -262,7 +265,7 @@ describe('extractQueryTextStrings', () => { }, }; - expect(extractQueryTextStrings(query)).toEqual([ + expect(extractQueryTextStrings(query)).toStrictEqual([ '$service_name', 'env = $deployment_environment', ]); @@ -278,7 +281,7 @@ describe('extractQueryTextStrings', () => { ], }; - expect(extractQueryTextStrings(query)).toEqual(['up{env="$env"}']); + expect(extractQueryTextStrings(query)).toStrictEqual(['up{env="$env"}']); }); it('skips clickhouse entries with empty query strings', () => { @@ -296,7 +299,7 @@ describe('extractQueryTextStrings', () => { ], }; - expect(extractQueryTextStrings(query)).toEqual([ + expect(extractQueryTextStrings(query)).toStrictEqual([ 'SELECT * WHERE x = {{.env}}', ]); }); @@ -306,7 +309,7 @@ describe('extractQueryTextStrings', () => { ...baseQuery, queryType: 'unknown' as unknown as EQueryType, }; - expect(extractQueryTextStrings(query)).toEqual([]); + expect(extractQueryTextStrings(query)).toStrictEqual([]); }); }); @@ -319,7 +322,9 @@ describe('getVariableReferencesInQuery', () => { ]; it('returns empty array when query has no text', () => { - expect(getVariableReferencesInQuery(baseQuery, variableNames)).toEqual([]); + expect(getVariableReferencesInQuery(baseQuery, variableNames)).toStrictEqual( + [], + ); }); it('detects variables referenced in query builder filters', () => { @@ -344,7 +349,7 @@ describe('getVariableReferencesInQuery', () => { }; const result = getVariableReferencesInQuery(query, variableNames); - expect(result).toEqual(['deployment_environment', 'service_name']); + expect(result).toStrictEqual(['deployment_environment', 'service_name']); }); it('detects variables in promql queries', () => { @@ -363,7 +368,7 @@ describe('getVariableReferencesInQuery', () => { }; const result = getVariableReferencesInQuery(query, variableNames); - 
expect(result).toEqual(['deployment_environment', 'endpoint']); + expect(result).toStrictEqual(['deployment_environment', 'endpoint']); }); it('detects variables in clickhouse sql queries', () => { @@ -381,7 +386,7 @@ describe('getVariableReferencesInQuery', () => { }; const result = getVariableReferencesInQuery(query, variableNames); - expect(result).toEqual(['service_name']); + expect(result).toStrictEqual(['service_name']); }); it('detects variables spread across multiple queryData entries', () => { @@ -406,7 +411,7 @@ describe('getVariableReferencesInQuery', () => { }; const result = getVariableReferencesInQuery(query, variableNames); - expect(result).toEqual(['deployment_environment', 'service_name']); + expect(result).toStrictEqual(['deployment_environment', 'service_name']); }); it('returns empty array when no variables are referenced', () => { @@ -423,7 +428,7 @@ describe('getVariableReferencesInQuery', () => { ], }; - expect(getVariableReferencesInQuery(query, variableNames)).toEqual([]); + expect(getVariableReferencesInQuery(query, variableNames)).toStrictEqual([]); }); it('returns empty array when variableNames list is empty', () => { @@ -440,6 +445,6 @@ describe('getVariableReferencesInQuery', () => { ], }; - expect(getVariableReferencesInQuery(query, [])).toEqual([]); + expect(getVariableReferencesInQuery(query, [])).toStrictEqual([]); }); }); diff --git a/frontend/src/lib/getRandomColor.test.ts b/frontend/src/lib/getRandomColor.test.ts index 906ac95f06..07226ea7e6 100644 --- a/frontend/src/lib/getRandomColor.test.ts +++ b/frontend/src/lib/getRandomColor.test.ts @@ -5,7 +5,7 @@ import spans from './__fixtures__/getRandomColor'; import { colors, spanServiceNameToColorMapping } from './getRandomColor'; describe('spanServiceNameToColorMapping', () => { - test('should map span services to colors', () => { + it('should map span services to colors', () => { const expectedServiceToColorMap = { serviceA: themeColors.chartcolors.turquoise, serviceB: 
themeColors.chartcolors.turquoise, @@ -14,16 +14,16 @@ describe('spanServiceNameToColorMapping', () => { const result = spanServiceNameToColorMapping(spans); - expect(result).toEqual(expectedServiceToColorMap); + expect(result).toStrictEqual(expectedServiceToColorMap); }); - test('should return an empty object when input is an empty array', () => { + it('should return an empty object when input is an empty array', () => { const spans: Span[] = []; const expectedServiceToColorMap = {}; const result = spanServiceNameToColorMapping(spans); - expect(result).toEqual(expectedServiceToColorMap); + expect(result).toStrictEqual(expectedServiceToColorMap); }); }); diff --git a/frontend/src/lib/getStep.test.ts b/frontend/src/lib/getStep.test.ts index a70057245f..5f5401e511 100644 --- a/frontend/src/lib/getStep.test.ts +++ b/frontend/src/lib/getStep.test.ts @@ -3,7 +3,7 @@ import dayjs from 'dayjs'; import getStep, { DefaultStepSize, MaxDataPoints } from './getStep'; describe('get dynamic step size', () => { - test('should return default step size if diffSec is less than MaxDataPoints', () => { + it('should return default step size if diffSec is less than MaxDataPoints', () => { const start = dayjs().subtract(1, 'minute').valueOf(); const end = dayjs().valueOf(); @@ -16,7 +16,7 @@ describe('get dynamic step size', () => { expect(step).toBe(DefaultStepSize); }); - test('should return appropriate step size if diffSec is more than MaxDataPoints', () => { + it('should return appropriate step size if diffSec is more than MaxDataPoints', () => { const start = dayjs().subtract(4, 'hour').valueOf(); const end = dayjs().valueOf(); @@ -36,7 +36,7 @@ describe('get dynamic step size', () => { expect(step).toBe(expectedStep); }); - test('should correctly handle different input formats', () => { + it('should correctly handle different input formats', () => { const endSec = dayjs().unix(); const startSec = endSec - 4 * 3600; // 4 hours earlier @@ -66,7 +66,7 @@ describe('get dynamic step 
size', () => { expect(stepNs).toBe(expectedStep); // Expect the same result as 's' inputFormat }); - test('should throw an error for invalid input format', () => { + it('should throw an error for invalid input format', () => { const start = dayjs().valueOf(); const end = dayjs().valueOf(); @@ -79,7 +79,7 @@ describe('get dynamic step size', () => { }).toThrow('invalid format'); }); - test('should return DefaultStepSize when start and end are the same', () => { + it('should return DefaultStepSize when start and end are the same', () => { const start = dayjs().valueOf(); const end = start; // same as start @@ -92,7 +92,7 @@ describe('get dynamic step size', () => { expect(step).toBe(DefaultStepSize); }); - test('should return DefaultStepSize if diffSec is exactly MaxDataPoints', () => { + it('should return DefaultStepSize if diffSec is exactly MaxDataPoints', () => { const endMs = dayjs().valueOf(); const startMs = endMs - MaxDataPoints * 1000; // exactly MaxDataPoints seconds earlier @@ -105,7 +105,7 @@ describe('get dynamic step size', () => { expect(step).toBe(DefaultStepSize); // since calculated step size is less than DefaultStepSize, it should return DefaultStepSize }); - test('should return DefaultStepSize for future dates less than (MaxDataPoints * DefaultStepSize) seconds ahead', () => { + it('should return DefaultStepSize for future dates less than (MaxDataPoints * DefaultStepSize) seconds ahead', () => { const start = dayjs().valueOf(); const end = start + MaxDataPoints * DefaultStepSize * 1000 - 1; // just one millisecond less than (MaxDataPoints * DefaultStepSize) seconds ahead @@ -118,7 +118,7 @@ describe('get dynamic step size', () => { expect(step).toBe(DefaultStepSize); }); - test('should handle string inputs correctly for a time range greater than (MaxDataPoints * DefaultStepSize) seconds', () => { + it('should handle string inputs correctly for a time range greater than (MaxDataPoints * DefaultStepSize) seconds', () => { const endMs = 
dayjs().valueOf(); const startMs = endMs - (MaxDataPoints * DefaultStepSize * 1000 + 1); // one millisecond more than (MaxDataPoints * DefaultStepSize) seconds earlier diff --git a/frontend/src/lib/newQueryBuilder/__test__/chooseAutocompleteFromCustomValue.test.ts b/frontend/src/lib/newQueryBuilder/__test__/chooseAutocompleteFromCustomValue.test.ts index aa6bc0388d..9a911b5c35 100644 --- a/frontend/src/lib/newQueryBuilder/__test__/chooseAutocompleteFromCustomValue.test.ts +++ b/frontend/src/lib/newQueryBuilder/__test__/chooseAutocompleteFromCustomValue.test.ts @@ -37,16 +37,16 @@ describe('chooseAutocompleteFromCustomValue', () => { const r1 = chooseAutocompleteFromCustomValue([], 'region', 'number'); const r2 = chooseAutocompleteFromCustomValue([], 'region', 'number'); expect(r1.id).toBeTruthy(); - expect(r1.id).not.toEqual('----'); - expect(r1.id).toEqual(r2.id); - expect(r1.key).toEqual('region'); + expect(r1.id).not.toBe('----'); + expect(r1.id).toStrictEqual(r2.id); + expect(r1.key).toBe('region'); // "number" maps to Float64 in our normalization - expect(r1.dataType).toEqual(DataTypes.Float64); + expect(r1.dataType).toStrictEqual(DataTypes.Float64); }); it('normalizes "number" to Float64', () => { const res = chooseAutocompleteFromCustomValue([], 'latency', 'number'); - expect(res.dataType).toEqual(DataTypes.Float64); + expect(res.dataType).toStrictEqual(DataTypes.Float64); }); it('same key but different dataType returns new object with computed id', () => { @@ -60,7 +60,7 @@ describe('chooseAutocompleteFromCustomValue', () => { { key: 'service.name', dataType: DataTypes.Float64, type: '' }, baseAutoCompleteIdKeysOrder, ); - expect(res).toEqual( + expect(res).toStrictEqual( expect.objectContaining({ key: 'service.name', dataType: DataTypes.Float64, @@ -80,7 +80,7 @@ describe('chooseAutocompleteFromCustomValue', () => { { key: 'unknown_key', dataType: 'unknown' as any, type: '' }, baseAutoCompleteIdKeysOrder, ); - expect(res).toEqual( + 
expect(res).toStrictEqual( expect.objectContaining({ key: 'unknown_key', dataType: 'unknown', @@ -96,7 +96,7 @@ describe('chooseAutocompleteFromCustomValue', () => { { key: 'undef_key', dataType: DataTypes.EMPTY, type: '' }, baseAutoCompleteIdKeysOrder, ); - expect(res).toEqual( + expect(res).toStrictEqual( expect.objectContaining({ key: 'undef_key', dataType: DataTypes.EMPTY, @@ -108,6 +108,6 @@ describe('chooseAutocompleteFromCustomValue', () => { it('uses empty string as default type when fieldType is not provided', () => { const res = chooseAutocompleteFromCustomValue([], 'env', DataTypes.String); - expect(res.type).toEqual(''); + expect(res.type).toBe(''); }); }); diff --git a/frontend/src/lib/uPlotLib/plugins/onClickPlugin.ts b/frontend/src/lib/uPlotLib/plugins/onClickPlugin.ts index 64214ed9a3..9d60256a31 100644 --- a/frontend/src/lib/uPlotLib/plugins/onClickPlugin.ts +++ b/frontend/src/lib/uPlotLib/plugins/onClickPlugin.ts @@ -37,7 +37,7 @@ function getPreferredSeriesIndex( const top = e.clientY - bbox.top; // Prefer series explicitly marked as focused for (let i = 1; i < u.series.length; i++) { - // @ts-ignore + // @ts-expect-error const isSeriesFocused = u.series[i]?._focus === true; const isSeriesShown = u.series[i].show !== false; const seriesValue = u.data[i]?.[timestampIndex]; diff --git a/frontend/src/lib/uPlotLib/plugins/tooltipPlugin.ts b/frontend/src/lib/uPlotLib/plugins/tooltipPlugin.ts index 33d175ad84..196d51bef3 100644 --- a/frontend/src/lib/uPlotLib/plugins/tooltipPlugin.ts +++ b/frontend/src/lib/uPlotLib/plugins/tooltipPlugin.ts @@ -182,7 +182,7 @@ const generateTooltipContent = ( show: item.show || false, color, label, - // @ts-ignore + // @ts-expect-error focus: item?._focus || false, value, tooltipValue, diff --git a/frontend/src/lib/uPlotLib/utils/getYAxisScale.test.ts b/frontend/src/lib/uPlotLib/utils/getYAxisScale.test.ts index b333759a5f..f9bd8748d2 100644 --- a/frontend/src/lib/uPlotLib/utils/getYAxisScale.test.ts +++ 
b/frontend/src/lib/uPlotLib/utils/getYAxisScale.test.ts @@ -58,7 +58,7 @@ describe('getYAxisScale', () => { softMax: null, } as GetYAxisScale); - expect(result).toEqual({ auto: true }); + expect(result).toStrictEqual({ auto: true }); }); it('Threshold absent, series data present softmin and softmax present', () => { @@ -70,7 +70,7 @@ describe('getYAxisScale', () => { softMax: mockSoftMax, } as GetYAxisScale); - expect(result).toEqual({ + expect(result).toStrictEqual({ auto: false, range: [5, 30], }); @@ -85,7 +85,7 @@ describe('getYAxisScale', () => { softMax: null, } as GetYAxisScale); - expect(result).toEqual({ auto: true }); + expect(result).toStrictEqual({ auto: true }); }); it('Threshold absent, series data present, softmin present and softmax absent', () => { @@ -97,7 +97,7 @@ describe('getYAxisScale', () => { softMax: null, } as GetYAxisScale); - expect(result).toEqual({ + expect(result).toStrictEqual({ auto: false, range: [5, 25], }); @@ -112,7 +112,7 @@ describe('getYAxisScale', () => { softMax: mockSoftMax, } as GetYAxisScale); - expect(result).toEqual({ + expect(result).toStrictEqual({ auto: false, range: [15, 30], }); @@ -127,7 +127,7 @@ describe('getYAxisScale', () => { softMax: mockSoftMax, } as GetYAxisScale); - expect(result).toEqual({ + expect(result).toStrictEqual({ auto: false, range: [5, 30], }); @@ -142,7 +142,7 @@ describe('getYAxisScale', () => { softMax: null, } as GetYAxisScale); - expect(result).toEqual({ + expect(result).toStrictEqual({ auto: false, range: [10, 20], }); @@ -157,7 +157,7 @@ describe('getYAxisScale', () => { softMax: mockSoftMax, } as GetYAxisScale); - expect(result).toEqual({ + expect(result).toStrictEqual({ auto: false, range: [10, 30], }); @@ -172,7 +172,7 @@ describe('getYAxisScale', () => { softMax: null, } as GetYAxisScale); - expect(result).toEqual({ + expect(result).toStrictEqual({ auto: false, range: [5, 20], }); @@ -187,7 +187,7 @@ describe('getYAxisScale', () => { softMax: mockSoftMax, } as GetYAxisScale); - 
expect(result).toEqual({ + expect(result).toStrictEqual({ range: { min: { soft: mockSoftMin, mode: 2 }, max: { soft: mockSoftMax, mode: 2 }, @@ -204,7 +204,7 @@ describe('getYAxisScale', () => { softMax: mockSoftMax, } as GetYAxisScale); - expect(result).toEqual({ + expect(result).toStrictEqual({ auto: false, range: [5, 30], }); diff --git a/frontend/src/lib/uPlotLib/utils/tests/getSeriesData.test.ts b/frontend/src/lib/uPlotLib/utils/tests/getSeriesData.test.ts index b63bbb62c5..4da35ba3f5 100644 --- a/frontend/src/lib/uPlotLib/utils/tests/getSeriesData.test.ts +++ b/frontend/src/lib/uPlotLib/utils/tests/getSeriesData.test.ts @@ -8,24 +8,24 @@ import { jest.mock('../getRenderer', () => jest.fn().mockImplementation(() => () => {})); describe('Get Series Data', () => { - test('Should return series data for uplot chart', () => { + it('Should return series data for uplot chart', () => { const seriesData = getSeries(seriesBarChartData); - expect(seriesData.length).toBe(5); + expect(seriesData).toHaveLength(5); expect(seriesData[1].label).toBe('firstLegend'); expect(seriesData[1].show).toBe(true); expect(seriesData[1].fill).toBe('#FF6F91'); expect(seriesData[1].width).toBe(2); }); - test('Should return series drawline bar chart for panel type barchart', () => { + it('Should return series drawline bar chart for panel type barchart', () => { const seriesData = getSeries(seriesBarChartData); - // @ts-ignore + // @ts-expect-error expect(seriesData[1].drawStyle).toBe('bars'); }); - test('Should return seris drawline line chart for panel type time series', () => { + it('Should return series drawline line chart for panel type time series', () => { const seriesData = getSeries(seriesLineChartData); - // @ts-ignore + // @ts-expect-error expect(seriesData[1].drawStyle).toBe('line'); }); diff --git a/frontend/src/lib/uPlotLib/utils/tests/getUplotChartOptions.test.ts b/frontend/src/lib/uPlotLib/utils/tests/getUplotChartOptions.test.ts index b1e7a0140e..3d00d0e5a8 100644 ---
a/frontend/src/lib/uPlotLib/utils/tests/getUplotChartOptions.test.ts +++ b/frontend/src/lib/uPlotLib/utils/tests/getUplotChartOptions.test.ts @@ -20,7 +20,7 @@ jest.mock('../getSeriesData', () => ); describe('getUPlotChartOptions', () => { - test('should return uPlot options', () => { + it('should return uPlot options', () => { const options = getUPlotChartOptions(inputPropsTimeSeries); expect(options.legend?.isolate).toBe(true); expect(options.width).toBe(inputPropsTimeSeries.dimensions.width); @@ -28,7 +28,7 @@ describe('getUPlotChartOptions', () => { expect(options.series[1].label).toBe('A'); }); - test('should return enhanced legend options when enabled', () => { + it('should return enhanced legend options when enabled', () => { const options = getUPlotChartOptions({ ...inputPropsTimeSeries, enhancedLegend: true, @@ -40,7 +40,7 @@ describe('getUPlotChartOptions', () => { expect(Array.isArray(options.hooks?.ready)).toBe(true); }); - test('should adjust chart dimensions for right legend position', () => { + it('should adjust chart dimensions for right legend position', () => { const options = getUPlotChartOptions({ ...inputPropsTimeSeries, enhancedLegend: true, @@ -51,7 +51,7 @@ describe('getUPlotChartOptions', () => { expect(options.height).toBe(inputPropsTimeSeries.dimensions.height); }); - test('should adjust chart dimensions for bottom legend position', () => { + it('should adjust chart dimensions for bottom legend position', () => { const options = getUPlotChartOptions({ ...inputPropsTimeSeries, enhancedLegend: true, @@ -62,39 +62,35 @@ describe('getUPlotChartOptions', () => { expect(options.height).toBeLessThan(inputPropsTimeSeries.dimensions.height); }); - test('Should return line chart as drawStyle for time series', () => { + it('Should return line chart as drawStyle for time series', () => { const options = getUPlotChartOptions(inputPropsTimeSeries); - // @ts-ignore + // @ts-expect-error expect(options.series[1].drawStyle).toBe('line'); - // @ts-ignore + 
// @ts-expect-error expect(options.series[1].lineInterpolation).toBe('spline'); - // @ts-ignore expect(options.series[1].show).toBe(true); expect(options.series[1].label).toBe('A'); expect(options.series[1].stroke).toBe('#6495ED'); expect(options.series[1].width).toBe(2); expect(options.series[1].spanGaps).toBe(true); - // @ts-ignore - expect(options.series[1].points.size).toBe(5); + expect(options.series[1].points?.size).toBe(5); }); - test('should return bar chart as drawStyle for panel type bar', () => { + it('should return bar chart as drawStyle for panel type bar', () => { const options = getUPlotChartOptions({ ...inputPropsTimeSeries, panelType: PANEL_TYPES.BAR, }); - // @ts-ignore + // @ts-expect-error expect(options.series[1].drawStyle).toBe('bars'); - // @ts-ignore - expect(options.series[1].lineInterpolation).toBe(null); - // @ts-ignore + // @ts-expect-error + expect(options.series[1].lineInterpolation).toBeNull(); expect(options.series[1].show).toBe(true); expect(options.series[1].label).toBe('A'); expect(options.series[1].fill).toBe('#6495ED40'); expect(options.series[1].stroke).toBe('#6495ED'); expect(options.series[1].width).toBe(2); expect(options.series[1].spanGaps).toBe(true); - // @ts-ignore - expect(options.series[1].points.size).toBe(5); + expect(options.series[1].points?.size).toBe(5); }); }); diff --git a/frontend/src/lib/uPlotV2/components/Tooltip/__tests__/utils.test.ts b/frontend/src/lib/uPlotV2/components/Tooltip/__tests__/utils.test.ts index df284bb8bf..866ae5b909 100644 --- a/frontend/src/lib/uPlotV2/components/Tooltip/__tests__/utils.test.ts +++ b/frontend/src/lib/uPlotV2/components/Tooltip/__tests__/utils.test.ts @@ -297,7 +297,7 @@ describe('Tooltip utils', () => { decimalPrecision, }); - expect(result.map((item) => item.value)).toEqual([3, 1, 4, 2]); + expect(result.map((item) => item.value)).toStrictEqual([3, 1, 4, 2]); }); }); }); diff --git a/frontend/src/lib/uPlotV2/components/__tests__/UPlotChart.test.tsx 
b/frontend/src/lib/uPlotV2/components/__tests__/UPlotChart.test.tsx index a760c92f1d..8cd167995f 100644 --- a/frontend/src/lib/uPlotV2/components/__tests__/UPlotChart.test.tsx +++ b/frontend/src/lib/uPlotV2/components/__tests__/UPlotChart.test.tsx @@ -220,8 +220,8 @@ describe('UPlotChart', () => { const [opts] = mockUPlotConstructor.mock.calls[0]; expect(opts.width).toBe(500); expect(opts.height).toBe(300); - expect(opts.axes).toEqual([{ scale: 'y' }]); - expect(opts.cursor).toEqual({ show: true }); + expect(opts.axes).toStrictEqual([{ scale: 'y' }]); + expect(opts.cursor).toStrictEqual({ show: true }); }); it('skips creation when width or height is 0', () => { @@ -346,8 +346,8 @@ describe('UPlotChart', () => { }); const [, receivedData] = mockUPlotConstructor.mock.calls[0]; - expect(receivedData[0]).toEqual([0, 50, 100]); - expect(receivedData[1]).toEqual([1, null, 2]); + expect(receivedData[0]).toStrictEqual([0, 50, 100]); + expect(receivedData[1]).toStrictEqual([1, null, 2]); }); it('passes data through unchanged when no gap exceeds the numeric threshold', () => { @@ -396,8 +396,8 @@ describe('UPlotChart', () => { ); const receivedData = instances[0].setData.mock.calls[0][0]; - expect(receivedData[0]).toEqual([0, 50, 100]); - expect(receivedData[1]).toEqual([3, null, 4]); + expect(receivedData[0]).toStrictEqual([0, 50, 100]); + expect(receivedData[1]).toStrictEqual([3, null, 4]); }); }); diff --git a/frontend/src/lib/uPlotV2/config/__tests__/UPlotAxisBuilder.test.ts b/frontend/src/lib/uPlotV2/config/__tests__/UPlotAxisBuilder.test.ts index 128506dc55..8f2b136821 100644 --- a/frontend/src/lib/uPlotV2/config/__tests__/UPlotAxisBuilder.test.ts +++ b/frontend/src/lib/uPlotV2/config/__tests__/UPlotAxisBuilder.test.ts @@ -40,12 +40,12 @@ describe('UPlotAxisBuilder', () => { expect(config.gap).toBe(5); // Default grid and ticks are created - expect(config.grid).toEqual({ + expect(config.grid).toStrictEqual({ stroke: 'rgba(0,0,0,0.5)', width: 0.2, show: true, }); - 
expect(config.ticks).toEqual({ + expect(config.ticks).toStrictEqual({ width: 0.3, show: true, }); @@ -81,18 +81,18 @@ describe('UPlotAxisBuilder', () => { expect(config.label).toBe('Time'); expect(config.show).toBe(false); expect(config.gap).toBe(10); - expect(config.grid).toEqual({ + expect(config.grid).toStrictEqual({ stroke: '#ff0000', width: 1, show: false, }); - expect(config.ticks).toEqual({ + expect(config.ticks).toStrictEqual({ stroke: '#00ff00', width: 1, show: false, size: 10, }); - expect(config.values).toEqual(['1', '2', '3']); + expect(config.values).toStrictEqual(['1', '2', '3']); expect(config.space).toBe(20); expect(config.size).toBe(100); expect(config.stroke).toBe('#0000ff'); @@ -111,7 +111,7 @@ describe('UPlotAxisBuilder', () => { const config = builder.getConfig(); - expect(config.grid).toEqual({ + expect(config.grid).toStrictEqual({ // stroke falls back to theme-based default when not provided stroke: 'rgba(231,233,237,0.3)', // provided width overrides default @@ -131,7 +131,7 @@ describe('UPlotAxisBuilder', () => { const withoutTicks = new UPlotAxisBuilder(createAxisProps()); expect(withTicks.getConfig().ticks).toBe(customTicks); - expect(withoutTicks.getConfig().ticks).toEqual({ + expect(withoutTicks.getConfig().ticks).toStrictEqual({ width: 0.3, show: true, }); @@ -192,7 +192,12 @@ describe('UPlotAxisBuilder', () => { expect(getToolTipValue).toHaveBeenNthCalledWith(2, '2', 'ms', 3); // Null/NaN values should map to empty strings - expect(result).toEqual(['formatted:1:ms:3', '', 'formatted:2:ms:3', '']); + expect(result).toStrictEqual([ + 'formatted:1:ms:3', + '', + 'formatted:2:ms:3', + '', + ]); }); it('adds dynamic size calculator only for Y-axis when size is not provided', () => { diff --git a/frontend/src/lib/uPlotV2/config/__tests__/UPlotConfigBuilder.test.ts b/frontend/src/lib/uPlotV2/config/__tests__/UPlotConfigBuilder.test.ts index a9bd90e051..99171e47d4 100644 --- 
a/frontend/src/lib/uPlotV2/config/__tests__/UPlotConfigBuilder.test.ts +++ b/frontend/src/lib/uPlotV2/config/__tests__/UPlotConfigBuilder.test.ts @@ -75,7 +75,7 @@ describe('UPlotConfigBuilder', () => { const config = builder.getConfig(); const setSelectHooks = config.hooks?.setSelect ?? []; - expect(setSelectHooks.length).toBe(1); + expect(setSelectHooks).toHaveLength(1); const uplotInstance = { select: { left: 10, width: 0 }, @@ -96,7 +96,7 @@ describe('UPlotConfigBuilder', () => { const config = builder.getConfig(); const setSelectHooks = config.hooks?.setSelect ?? []; - expect(setSelectHooks.length).toBe(1); + expect(setSelectHooks).toHaveLength(1); const posToVal = jest .fn() @@ -164,7 +164,7 @@ describe('UPlotConfigBuilder', () => { // Legend items align with series and carry label and color from series config const legendItems = builder.getLegendItems(); - expect(Object.keys(legendItems)).toEqual(['1', '2']); + expect(Object.keys(legendItems)).toStrictEqual(['1', '2']); expect(legendItems[1].seriesIndex).toBe(1); expect(legendItems[1].label).toBe('Requests'); expect(legendItems[2].label).toBe('Errors'); @@ -194,7 +194,7 @@ describe('UPlotConfigBuilder', () => { // Only one scale entry for 'y' (merge path used, no duplicate added) expect(config.scales).toBeDefined(); const scales = config.scales ?? {}; - expect(Object.keys(scales)).toEqual(['y']); + expect(Object.keys(scales)).toStrictEqual(['y']); expect(scales.y?.range).toBeDefined(); }); @@ -359,7 +359,7 @@ describe('UPlotConfigBuilder', () => { const drawHooks = config.hooks?.draw ?? []; // Only a single draw hook should be registered for the same scaleKey - expect(drawHooks.length).toBe(1); + expect(drawHooks).toHaveLength(1); }); it('adds multiple thresholds when scale key is different', () => { @@ -380,7 +380,7 @@ describe('UPlotConfigBuilder', () => { const drawHooks = config.hooks?.draw ?? 
[]; // Two draw hooks should be registered for different scaleKeys - expect(drawHooks.length).toBe(2); + expect(drawHooks).toHaveLength(2); }); it('merges cursor configuration with defaults instead of replacing them', () => { @@ -472,11 +472,11 @@ describe('UPlotConfigBuilder', () => { const config = builder.getConfig(); - expect(config.bands).toEqual(bands); - expect(config.padding).toEqual([10, 20, 30, 40]); - expect(config.legend).toEqual({ show: true, live: true }); - expect(config.focus).toEqual({ alpha: 0.5 }); - expect(config.select).toEqual({ left: 0, width: 0, top: 0, height: 0 }); + expect(config.bands).toStrictEqual(bands); + expect(config.padding).toStrictEqual([10, 20, 30, 40]); + expect(config.legend).toStrictEqual({ show: true, live: true }); + expect(config.focus).toStrictEqual({ alpha: 0.5 }); + expect(config.select).toStrictEqual({ left: 0, width: 0, top: 0, height: 0 }); expect(config.tzDate).toBe(tzDate); }); diff --git a/frontend/src/lib/uPlotV2/config/__tests__/UPlotScaleBuilder.test.ts b/frontend/src/lib/uPlotV2/config/__tests__/UPlotScaleBuilder.test.ts index 9b3f3b2cb7..c24d69c655 100644 --- a/frontend/src/lib/uPlotV2/config/__tests__/UPlotScaleBuilder.test.ts +++ b/frontend/src/lib/uPlotV2/config/__tests__/UPlotScaleBuilder.test.ts @@ -224,12 +224,12 @@ describe('UPlotScaleBuilder', () => { }); expect(builder.props.min).toBe(2); - expect(builder.props.softMax).toBe(undefined); + expect(builder.props.softMax).toBeUndefined(); expect(builder.props.max).toBe(10); expect(builder.props.softMin).toBe(1); expect(builder.props.time).toBe(false); expect(builder.props.scaleKey).toBe('y'); expect(builder.props.distribution).toBe(DistributionType.Linear); - expect(builder.props.thresholds).toBe(undefined); + expect(builder.props.thresholds).toBeUndefined(); }); }); diff --git a/frontend/src/lib/uPlotV2/config/__tests__/UPlotSeriesBuilder.test.ts b/frontend/src/lib/uPlotV2/config/__tests__/UPlotSeriesBuilder.test.ts index f4abf512ac..1a7fa7bdfe 100644 
--- a/frontend/src/lib/uPlotV2/config/__tests__/UPlotSeriesBuilder.test.ts +++ b/frontend/src/lib/uPlotV2/config/__tests__/UPlotSeriesBuilder.test.ts @@ -226,7 +226,7 @@ describe('UPlotSeriesBuilder', () => { const config = builder.getConfig(); - expect(config.dash).toEqual([10, 10]); + expect(config.dash).toStrictEqual([10, 10]); expect(config.cap).toBe('round'); }); diff --git a/frontend/src/lib/uPlotV2/context/__tests__/PlotContext.test.tsx b/frontend/src/lib/uPlotV2/context/__tests__/PlotContext.test.tsx index c35a07498c..7ff5bdb2e5 100644 --- a/frontend/src/lib/uPlotV2/context/__tests__/PlotContext.test.tsx +++ b/frontend/src/lib/uPlotV2/context/__tests__/PlotContext.test.tsx @@ -204,7 +204,7 @@ describe('PlotContext', () => { const setSeries = (plot.setSeries as jest.Mock).mock.calls; // index 0 is skipped, so we expect calls for 1 and 2 - expect(setSeries).toEqual([ + expect(setSeries).toStrictEqual([ [1, { show: true }], [2, { show: false }], ]); @@ -249,7 +249,7 @@ describe('PlotContext', () => { const setSeries = (plot.setSeries as jest.Mock).mock.calls; // After reset, all non-zero series should be shown - expect(setSeries).toEqual([ + expect(setSeries).toStrictEqual([ [1, { show: true }], [2, { show: true }], ]); diff --git a/frontend/src/lib/uPlotV2/hooks/__tests__/useLegendsSync.test.ts b/frontend/src/lib/uPlotV2/hooks/__tests__/useLegendsSync.test.ts index 5c4d5e4dd9..e2b61a6024 100644 --- a/frontend/src/lib/uPlotV2/hooks/__tests__/useLegendsSync.test.ts +++ b/frontend/src/lib/uPlotV2/hooks/__tests__/useLegendsSync.test.ts @@ -96,7 +96,7 @@ describe('useLegendsSync', () => { expect.any(Function), ); - expect(result.current.legendItemsMap).toEqual(initialItems); + expect(result.current.legendItemsMap).toStrictEqual(initialItems); }); it('updates focusedSeriesIndex when a series gains focus via setSeries by default', async () => { @@ -168,7 +168,7 @@ describe('useLegendsSync', () => { invokeSetSeries(1, { show: true }); const after = 
result.current.legendItemsMap; - expect(after).toEqual(before); + expect(after).toStrictEqual(before); }); it('cancels pending visibility RAF on unmount', () => { diff --git a/frontend/src/lib/uPlotV2/utils/__tests__/dataUtils.test.ts b/frontend/src/lib/uPlotV2/utils/__tests__/dataUtils.test.ts index e2ac93899a..49e4fd5cab 100644 --- a/frontend/src/lib/uPlotV2/utils/__tests__/dataUtils.test.ts +++ b/frontend/src/lib/uPlotV2/utils/__tests__/dataUtils.test.ts @@ -105,8 +105,8 @@ describe('dataUtils', () => { const result = insertLargeGapNullsIntoAlignedData(data, options); - expect(result[0]).toEqual([0, 50, 100]); - expect(result[1]).toEqual([1, null, 2]); + expect(result[0]).toStrictEqual([0, 50, 100]); + expect(result[1]).toStrictEqual([1, null, 2]); }); it('inserts nulls at every gap that exceeds the threshold', () => { @@ -120,8 +120,8 @@ describe('dataUtils', () => { const result = insertLargeGapNullsIntoAlignedData(data, options); - expect(result[0]).toEqual([0, 50, 100, 110, 160, 210]); - expect(result[1]).toEqual([1, null, 2, 3, null, 4]); + expect(result[0]).toStrictEqual([0, 50, 100, 110, 160, 210]); + expect(result[1]).toStrictEqual([1, null, 2, 3, null, 4]); }); it('inserts null for all series at a gap triggered by any one series', () => { @@ -140,9 +140,9 @@ describe('dataUtils', () => { const result = insertLargeGapNullsIntoAlignedData(data, options); - expect(result[0]).toEqual([0, 50, 100]); - expect(result[1]).toEqual([1, null, 2]); - expect(result[2]).toEqual([3, null, 4]); + expect(result[0]).toStrictEqual([0, 50, 100]); + expect(result[1]).toStrictEqual([1, null, 2]); + expect(result[2]).toStrictEqual([3, null, 4]); }); it('ignores boolean spanGaps options (only numeric values trigger insertion)', () => { @@ -187,8 +187,8 @@ describe('dataUtils', () => { const result = insertLargeGapNullsIntoAlignedData(data, options); - expect(result[0]).toEqual([0, 50, 100, 110]); - expect(result[1]).toEqual([1, null, null, 2]); + 
expect(result[0]).toStrictEqual([0, 50, 100, 110]); + expect(result[1]).toStrictEqual([1, null, null, 2]); }); }); @@ -209,7 +209,7 @@ describe('dataUtils', () => { const result = applySpanGapsToAlignedData(data, options); - expect(result[1]).toEqual(ys); + expect(result[1]).toStrictEqual(ys); }); it('converts nulls to undefined when spanGaps is true', () => { @@ -219,7 +219,7 @@ describe('dataUtils', () => { const result = applySpanGapsToAlignedData(data, options); - expect(result[1]).toEqual([1, undefined, 2, undefined]); + expect(result[1]).toStrictEqual([1, undefined, 2, undefined]); }); it('leaves data unchanged when spanGaps is false', () => { @@ -229,7 +229,7 @@ describe('dataUtils', () => { const result = applySpanGapsToAlignedData(data, options); - expect(result[1]).toEqual(ys); + expect(result[1]).toStrictEqual(ys); }); it('inserts a null break point when a gap exceeds the numeric threshold', () => { @@ -242,8 +242,8 @@ describe('dataUtils', () => { const result = applySpanGapsToAlignedData(data, options); - expect(result[0]).toEqual([0, 50, 100, 110]); - expect(result[1]).toEqual([1, null, 2, 3]); + expect(result[0]).toStrictEqual([0, 50, 100, 110]); + expect(result[1]).toStrictEqual([1, null, 2, 3]); }); it('returns original data when no gap exceeds the numeric threshold', () => { @@ -273,11 +273,11 @@ describe('dataUtils', () => { const result = applySpanGapsToAlignedData(data, options); // x-axis extended with the inserted midpoint - expect(result[0]).toEqual([0, 50, 100]); + expect(result[0]).toStrictEqual([0, 50, 100]); // series 0: null at midpoint breaks the line - expect(result[1]).toEqual([1, null, 2]); + expect(result[1]).toStrictEqual([1, null, 2]); // series 1: null at midpoint converted to undefined → line spans over it - expect(result[2]).toEqual([3, undefined, 4]); + expect(result[2]).toStrictEqual([3, undefined, 4]); }); }); }); diff --git a/frontend/src/lib/uPlotV2/utils/__tests__/scale.test.ts 
b/frontend/src/lib/uPlotV2/utils/__tests__/scale.test.ts index b493be1f92..6c08f20732 100644 --- a/frontend/src/lib/uPlotV2/utils/__tests__/scale.test.ts +++ b/frontend/src/lib/uPlotV2/utils/__tests__/scale.test.ts @@ -19,7 +19,7 @@ describe('scale utils', () => { limits, }); - expect(result).toEqual(limits); + expect(result).toStrictEqual(limits); }); it('snaps positive limits to powers of the log base when distribution is logarithmic', () => { @@ -49,7 +49,7 @@ describe('scale utils', () => { logBase: 2, }); - expect(config).toEqual({}); + expect(config).toStrictEqual({}); }); it('returns linear distribution settings for non-time scales', () => { @@ -80,13 +80,13 @@ describe('scale utils', () => { const { rangeConfig, hardMinOnly, hardMaxOnly, hasFixedRange } = scaleUtils.getRangeConfig(0, 100, null, null, 0.1, 0.2); - expect(rangeConfig.min).toEqual({ + expect(rangeConfig.min).toStrictEqual({ pad: 0.1, hard: 0, soft: undefined, mode: 3, }); - expect(rangeConfig.max).toEqual({ + expect(rangeConfig.max).toStrictEqual({ pad: 0.2, hard: 100, soft: undefined, @@ -127,7 +127,7 @@ describe('scale utils', () => { 'y', ); - expect(result).toEqual([null, null]); + expect(result).toStrictEqual([null, null]); }); it('applies hard min/max for linear scale when only hard limits are set', () => { @@ -156,7 +156,7 @@ describe('scale utils', () => { const result = rangeFn(u, 10, 20, 'y'); // After applyHardLimits, the returned range should respect configured min/max - expect(result).toEqual([0, 100]); + expect(result).toStrictEqual([0, 100]); }); }); @@ -164,7 +164,7 @@ describe('scale utils', () => { it('returns original soft limits when there are no thresholds', () => { const result = scaleUtils.adjustSoftLimitsWithThresholds(1, 5, [], 'ms'); - expect(result).toEqual({ softMin: 1, softMax: 5 }); + expect(result).toStrictEqual({ softMin: 1, softMax: 5 }); }); it('expands soft limits to include threshold min/max values', () => { diff --git 
a/frontend/src/lib/uPlotV2/utils/__tests__/seriesPointsFilter.test.ts b/frontend/src/lib/uPlotV2/utils/__tests__/seriesPointsFilter.test.ts index aaa4b7de6a..d652dc08d6 100644 --- a/frontend/src/lib/uPlotV2/utils/__tests__/seriesPointsFilter.test.ts +++ b/frontend/src/lib/uPlotV2/utils/__tests__/seriesPointsFilter.test.ts @@ -78,7 +78,7 @@ describe('findSandwichedIndices', () => { ]; const yData = [1, null, null, 2]; const u = makeUPlot({ xData: [0, 1, 2, 3], yData }); - expect(findSandwichedIndices(gaps, yData, u)).toEqual([]); + expect(findSandwichedIndices(gaps, yData, u)).toStrictEqual([]); }); it('returns the index between two gaps that share a pixel boundary', () => { @@ -90,7 +90,7 @@ describe('findSandwichedIndices', () => { // posToIdx(10) → 2 const yData = [null, null, 5, null, null]; const u = makeUPlot({ xData: [0, 1, 2, 3, 4], yData, posToIdxFn: () => 2 }); - expect(findSandwichedIndices(gaps, yData, u)).toEqual([2]); + expect(findSandwichedIndices(gaps, yData, u)).toStrictEqual([2]); }); it('scans to nearest non-null when posToIdx lands on a null', () => { @@ -101,7 +101,7 @@ describe('findSandwichedIndices', () => { ]; const yData = [null, null, null, 7, null]; const u = makeUPlot({ xData: [0, 1, 2, 3, 4], yData, posToIdxFn: () => 2 }); - expect(findSandwichedIndices(gaps, yData, u)).toEqual([3]); + expect(findSandwichedIndices(gaps, yData, u)).toStrictEqual([3]); }); it('returns multiple indices when several gap pairs share boundaries', () => { @@ -118,7 +118,7 @@ describe('findSandwichedIndices', () => { yData, posToIdxFn: (pos) => (pos === 10 ? 
1 : 3), }); - expect(findSandwichedIndices(gaps, yData, u)).toEqual([1, 3]); + expect(findSandwichedIndices(gaps, yData, u)).toStrictEqual([1, 3]); }); }); diff --git a/frontend/src/lib/uPlotV2/utils/__tests__/threshold.test.ts b/frontend/src/lib/uPlotV2/utils/__tests__/threshold.test.ts index 6f1f7427a3..6b057c5ca7 100644 --- a/frontend/src/lib/uPlotV2/utils/__tests__/threshold.test.ts +++ b/frontend/src/lib/uPlotV2/utils/__tests__/threshold.test.ts @@ -2,7 +2,7 @@ import { findMinMaxThresholdValues } from '../threshold'; describe('findMinMaxThresholdValues', () => { it('returns [null, null] when thresholds array is empty or missing', () => { - expect(findMinMaxThresholdValues([], 'ms')).toEqual([null, null]); + expect(findMinMaxThresholdValues([], 'ms')).toStrictEqual([null, null]); }); it('returns min and max from thresholdValue when units are not provided', () => { diff --git a/frontend/src/pages/LogsExplorer/__tests__/LogsExplorer.test.tsx b/frontend/src/pages/LogsExplorer/__tests__/LogsExplorer.test.tsx index 2538d5bb6f..b8f4845b0f 100644 --- a/frontend/src/pages/LogsExplorer/__tests__/LogsExplorer.test.tsx +++ b/frontend/src/pages/LogsExplorer/__tests__/LogsExplorer.test.tsx @@ -88,7 +88,7 @@ const logsQueryServerRequest = (): void => ); describe('Logs Explorer Tests', () => { - test('Logs Explorer default view test without data', async () => { + it('Logs Explorer default view test without data', async () => { const { getByRole, queryByText, getByTestId, queryByTestId, container } = render( { }); // update this test properly - test.skip('Logs Explorer Page should render with data', async () => { + it.skip('Logs Explorer Page should render with data', async () => { // mocking the query range API to return the logs logsQueryServerRequest(); const { queryByText, queryByTestId } = render( @@ -166,7 +166,7 @@ describe('Logs Explorer Tests', () => { ); }); - test('Multiple Current Queries', async () => { + it('Multiple Current Queries', async () => { // mocking 
the query range API to return the logs logsQueryServerRequest(); const { queryAllByText } = render( @@ -235,10 +235,10 @@ describe('Logs Explorer Tests', () => { const queries = queryAllByText( "Enter your filter query (e.g., http.status_code >= 500 AND service.name = 'frontend')", ); - expect(queries.length).toBe(1); + expect(queries).toHaveLength(1); }); - test('frequency chart visibility and switch toggle', async () => { + it('frequency chart visibility and switch toggle', async () => { const { getByRole, queryByText } = render( { describe('SaveView', () => { it('should render the SaveView component', async () => { render(); - expect(await screen.findByText('Table View')).toBeInTheDocument(); + await expect(screen.findByText('Table View')).resolves.toBeInTheDocument(); const savedViews = screen.getAllByRole('row'); expect(savedViews).toHaveLength(2); @@ -53,7 +53,7 @@ describe('SaveView', () => { , ); - expect(await screen.findByText('Table View')).toBeInTheDocument(); + await expect(screen.findByText('Table View')).resolves.toBeInTheDocument(); const explorerIcon = await screen.findAllByTestId('go-to-explorer'); expect(explorerIcon[0]).toBeInTheDocument(); @@ -73,7 +73,7 @@ describe('SaveView', () => { it('should render the SaveView component with a search input', async () => { render(); const searchInput = screen.getByPlaceholderText('Search for views...'); - expect(await screen.findByText('Table View')).toBeInTheDocument(); + await expect(screen.findByText('Table View')).resolves.toBeInTheDocument(); expect(searchInput).toBeInTheDocument(); @@ -88,7 +88,7 @@ describe('SaveView', () => { expect(searchInput).toHaveValue('R-test panel'); searchInput.blur(); - expect(await screen.findByText('R-test panel')).toBeInTheDocument(); + await expect(screen.findByText('R-test panel')).resolves.toBeInTheDocument(); // Table View should not be present now const savedViews = screen.getAllByRole('row'); @@ -146,7 +146,9 @@ describe('SaveView', () => { const deleteButton = 
await screen.findAllByTestId('delete-view'); fireEvent.click(deleteButton[0]); - expect(await screen.findByText('delete_confirm_message')).toBeInTheDocument(); + await expect( + screen.findByText('delete_confirm_message'), + ).resolves.toBeInTheDocument(); const confirmButton = await screen.findByTestId('confirm-delete'); fireEvent.click(confirmButton); diff --git a/frontend/src/pages/ServiceTopLevelOperations/index.tsx b/frontend/src/pages/ServiceTopLevelOperations/index.tsx index 6b4c210d98..17ad6117d8 100644 --- a/frontend/src/pages/ServiceTopLevelOperations/index.tsx +++ b/frontend/src/pages/ServiceTopLevelOperations/index.tsx @@ -124,7 +124,7 @@ export default function ServiceTopLevelOperations(): JSX.Element { columns={columns} bordered title={(): string => 'Top Level Operations'} - // @ts-ignore + // @ts-expect-error dataSource={topLevelOperations} loading={isLoading} showHeader={false} diff --git a/frontend/src/pages/Services/Metrics.test.tsx b/frontend/src/pages/Services/Metrics.test.tsx index fcafd76466..9dc52550c7 100644 --- a/frontend/src/pages/Services/Metrics.test.tsx +++ b/frontend/src/pages/Services/Metrics.test.tsx @@ -3,7 +3,7 @@ import { render, screen } from 'tests/test-utils'; import Metrics from '.'; describe('Services', () => { - test('Should render the component', () => { + it('Should render the component', () => { render(); const inputBox = screen.getByTestId('resource-attributes-filter'); diff --git a/frontend/src/pages/SignUp/__tests__/SignUp.test.tsx b/frontend/src/pages/SignUp/__tests__/SignUp.test.tsx index 136afb38dd..379169c154 100644 --- a/frontend/src/pages/SignUp/__tests__/SignUp.test.tsx +++ b/frontend/src/pages/SignUp/__tests__/SignUp.test.tsx @@ -138,9 +138,9 @@ describe('SignUp Component - Regular Signup', () => { await user.type(confirmPasswordInput, 'password456'); await user.tab(); // Blur the confirm password field to trigger validation - expect( - await screen.findByText(/passwords don't match/i), - ).toBeInTheDocument(); 
+ await expect( + screen.findByText(/passwords don't match/i), + ).resolves.toBeInTheDocument(); }); it('clears password mismatch error when passwords match', async () => { @@ -157,9 +157,9 @@ describe('SignUp Component - Regular Signup', () => { await user.type(confirmPasswordInput, 'password456'); await user.tab(); // Blur the confirm password field to trigger validation - expect( - await screen.findByText(/passwords don't match/i), - ).toBeInTheDocument(); + await expect( + screen.findByText(/passwords don't match/i), + ).resolves.toBeInTheDocument(); await user.clear(confirmPasswordInput); await user.type(confirmPasswordInput, 'password123'); diff --git a/frontend/src/pages/TracesExplorer/__test__/TracesExplorer.test.tsx b/frontend/src/pages/TracesExplorer/__test__/TracesExplorer.test.tsx index 0651cc0bf7..e5c1ef053d 100644 --- a/frontend/src/pages/TracesExplorer/__test__/TracesExplorer.test.tsx +++ b/frontend/src/pages/TracesExplorer/__test__/TracesExplorer.test.tsx @@ -280,7 +280,7 @@ describe('TracesExplorer - Filters', () => { redirectWithQueryBuilderData.mock.calls[ redirectWithQueryBuilderData.mock.calls.length - 1 ][0].builder.queryData[0].filters.items, - ).toEqual( + ).toStrictEqual( expect.arrayContaining([ expect.objectContaining({ key: { @@ -302,7 +302,7 @@ describe('TracesExplorer - Filters', () => { redirectWithQueryBuilderData.mock.calls[ redirectWithQueryBuilderData.mock.calls.length - 1 ][0].builder.queryData[0].filters.items, - ).toEqual( + ).toStrictEqual( expect.arrayContaining([ expect.objectContaining({ key: { @@ -326,9 +326,9 @@ describe('TracesExplorer - Filters', () => { const { findByText, getByTestId } = render(); // check if the default query is applied - composite query has filters - serviceName : demo-app and name : HTTP GET /customer - expect(await findByText('demo-app')).toBeInTheDocument(); + await expect(findByText('demo-app')).resolves.toBeInTheDocument(); expect(getByTestId('serviceName-demo-app')).toBeChecked(); - 
expect(await findByText('HTTP GET /customer')).toBeInTheDocument(); + await expect(findByText('HTTP GET /customer')).resolves.toBeInTheDocument(); expect(getByTestId('name-HTTP GET /customer')).toBeChecked(); }); @@ -427,7 +427,7 @@ describe('TracesExplorer - Filters', () => { redirectWithQueryBuilderData.mock.calls[ redirectWithQueryBuilderData.mock.calls.length - 1 ][0].builder.queryData[0].filters.items, - ).toEqual( + ).toStrictEqual( expect.arrayContaining([ expect.objectContaining({ key: { @@ -461,7 +461,7 @@ describe('TracesExplorer - Filters', () => { redirectWithQueryBuilderData.mock.calls[ redirectWithQueryBuilderData.mock.calls.length - 1 ][0].builder.queryData[0].filters.items, - ).not.toEqual( + ).not.toStrictEqual( expect.arrayContaining([ expect.objectContaining({ key: { @@ -486,7 +486,7 @@ describe('TracesExplorer - Filters', () => { redirectWithQueryBuilderData.mock.calls[ redirectWithQueryBuilderData.mock.calls.length - 1 ][0].builder.queryData[0].filters.items, - ).toEqual([]); + ).toStrictEqual([]); }); }); @@ -499,7 +499,7 @@ jest.mock('hooks/useHandleExplorerTabChange', () => ({ let capturedPayload: QueryRangePayloadV5; -describe('TracesExplorer - ', () => { +describe('TracesExplorer -', () => { const quickFiltersListURL = `${BASE_URL}/api/v1/orgs/me/filters/traces`; const setupServer = (): void => { @@ -566,9 +566,9 @@ describe('TracesExplorer - ', () => { expect(capturedPayload).toBeDefined(); }); - expect((capturedPayload.compositeQuery.queries[0].spec as any).order).toEqual( - [{ key: { name: 'timestamp' }, direction: 'desc' }], - ); + expect( + (capturedPayload.compositeQuery.queries[0].spec as any).order, + ).toStrictEqual([{ key: { name: 'timestamp' }, direction: 'desc' }]); }); it.skip('trace explorer - table view', async () => { @@ -601,7 +601,9 @@ describe('TracesExplorer - ', () => { ['/traces-explorer/?panelType=trace&selectedExplorerView=trace'], ); - expect(await screen.findByText('Root Service Name')).toBeInTheDocument(); + 
await expect( + screen.findByText('Root Service Name'), + ).resolves.toBeInTheDocument(); // assert table headers expect(getByText('Root Operation Name')).toBeInTheDocument(); @@ -620,7 +622,7 @@ describe('TracesExplorer - ', () => { fireEvent.click(traceId); // assert redirection - should go to /trace/:traceId - expect(window.location.href).toEqual( + expect(window.location.href).toBe( 'http://localhost/trace/5765b60ba7cc4ddafe8bdaa9c1b4b246', ); }); @@ -653,10 +655,10 @@ describe('TracesExplorer - ', () => { expect(capturedPayload).toBeDefined(); expect( (capturedPayload?.compositeQuery?.queries[0].spec as any).order, - ).toEqual(defaultOrderBy); + ).toStrictEqual(defaultOrderBy); expect( (capturedPayload?.compositeQuery?.queries[0].spec as any).order, - ).not.toEqual(orderBy); + ).not.toStrictEqual(orderBy); }); }); @@ -679,7 +681,9 @@ describe('TracesExplorer - ', () => { fireEvent.click(hideExplorerOption); // explorer options should hide and show btn should be present - expect(await screen.findByTestId('show-explorer-option')).toBeInTheDocument(); + await expect( + screen.findByTestId('show-explorer-option'), + ).resolves.toBeInTheDocument(); expect(screen.queryByTestId('hide-toolbar')).toBeNull(); // show explorer options @@ -688,7 +692,9 @@ describe('TracesExplorer - ', () => { fireEvent.click(showExplorerOption); // explorer options should show and hide btn should be present - expect(await screen.findByTestId('hide-toolbar')).toBeInTheDocument(); + await expect( + screen.findByTestId('hide-toolbar'), + ).resolves.toBeInTheDocument(); }); it('select a view options - assert and save this view', async () => { @@ -706,9 +712,9 @@ describe('TracesExplorer - ', () => { fireEvent.mouseDown(viewSearchInput); - expect( - await screen.findByRole('option', { name: 'R-test panel' }), - ).toBeInTheDocument(); + await expect( + screen.findByRole('option', { name: 'R-test panel' }), + ).resolves.toBeInTheDocument(); // save this view fireEvent.click(await 
screen.findByText('Save this view')); @@ -749,7 +755,7 @@ describe('TracesExplorer - ', () => { expect(createDashboardBtn).toBeInTheDocument(); fireEvent.click(createDashboardBtn); - expect(await screen.findByText('Export Panel')).toBeInTheDocument(); + await expect(screen.findByText('Export Panel')).resolves.toBeInTheDocument(); const createDashboardModal = document.querySelector( '.ant-modal-content', ) as HTMLElement; diff --git a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.test.tsx b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.test.tsx index 972a27d0b0..82b2c9dbc3 100644 --- a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.test.tsx +++ b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.test.tsx @@ -8,7 +8,7 @@ import WorkspaceLocked from '.'; describe('WorkspaceLocked', () => { const apiURL = 'http://localhost/api/v2/licenses'; - test('Should render the component', async () => { + it('Should render the component', async () => { server.use( rest.get(apiURL, (req, res, ctx) => res(ctx.status(200), ctx.json(licensesSuccessWorkspaceLockedResponse)), @@ -30,7 +30,7 @@ describe('WorkspaceLocked', () => { expect(contactUsBtn).toBeInTheDocument(); }); - test('Render for Admin', async () => { + it('Render for Admin', async () => { server.use( rest.get(apiURL, (req, res, ctx) => res(ctx.status(200), ctx.json(licensesSuccessWorkspaceLockedResponse)), @@ -48,7 +48,7 @@ describe('WorkspaceLocked', () => { expect(updateCreditCardBtn).toBeInTheDocument(); }); - test('Render for non Admin', async () => { + it('Render for non Admin', async () => { server.use( rest.get(apiURL, (req, res, ctx) => res(ctx.status(200), ctx.json(licensesSuccessWorkspaceLockedResponse)), diff --git a/frontend/src/providers/Dashboard/__tests__/Dashboard.test.tsx b/frontend/src/providers/Dashboard/__tests__/Dashboard.test.tsx index 8e536938b3..1feaad0c9a 100644 --- a/frontend/src/providers/Dashboard/__tests__/Dashboard.test.tsx +++ 
b/frontend/src/providers/Dashboard/__tests__/Dashboard.test.tsx @@ -269,12 +269,12 @@ describe('Dashboard Provider - Query Key with Route Params', () => { .getAll() .map((query) => query.queryKey); expect(cacheKeys).toHaveLength(2); - expect(cacheKeys[0]).toEqual([ + expect(cacheKeys[0]).toStrictEqual([ REACT_QUERY_KEY.DASHBOARD_BY_ID, dashboardId1, true, // globalTime.isAutoRefreshDisabled ]); - expect(cacheKeys[1]).toEqual([ + expect(cacheKeys[1]).toStrictEqual([ REACT_QUERY_KEY.DASHBOARD_BY_ID, dashboardId2, true, // globalTime.isAutoRefreshDisabled @@ -475,7 +475,10 @@ describe('Dashboard Provider - URL Variables Integration', () => { // The selectedValue should be updated with normalized URL values expect(parsedVariables.environment.selectedValue).toBe('development'); - expect(parsedVariables.services.selectedValue).toEqual(['db', 'cache']); + expect(parsedVariables.services.selectedValue).toStrictEqual([ + 'db', + 'cache', + ]); // allSelected should be set to false when URL values override expect(parsedVariables.environment.allSelected).toBe(false); diff --git a/frontend/src/providers/Dashboard/__tests__/normalizeUrlValue.test.ts b/frontend/src/providers/Dashboard/__tests__/normalizeUrlValue.test.ts index 70bebe9aa1..b317e70a3b 100644 --- a/frontend/src/providers/Dashboard/__tests__/normalizeUrlValue.test.ts +++ b/frontend/src/providers/Dashboard/__tests__/normalizeUrlValue.test.ts @@ -17,7 +17,7 @@ const multiSelectVariable: Partial = { describe('normalizeUrlValueForVariable', () => { describe('Single select variable', () => { - test('should keep single string value as is', () => { + it('should keep single string value as is', () => { const result = normalizeUrlValueForVariable( 'production', singleSelectVariable as IDashboardVariable, @@ -25,7 +25,7 @@ describe('normalizeUrlValueForVariable', () => { expect(result).toBe('production'); }); - test('should keep single number value as is', () => { + it('should keep single number value as is', () => { const 
result = normalizeUrlValueForVariable( 123, singleSelectVariable as IDashboardVariable, @@ -33,7 +33,7 @@ describe('normalizeUrlValueForVariable', () => { expect(result).toBe(123); }); - test('should take first element from array', () => { + it('should take first element from array', () => { const result = normalizeUrlValueForVariable( ['production', 'staging'], singleSelectVariable as IDashboardVariable, @@ -41,7 +41,7 @@ describe('normalizeUrlValueForVariable', () => { expect(result).toBe('production'); }); - test('should return null for empty array', () => { + it('should return null for empty array', () => { const result = normalizeUrlValueForVariable( [], singleSelectVariable as IDashboardVariable, @@ -49,7 +49,7 @@ describe('normalizeUrlValueForVariable', () => { expect(result).toBeNull(); }); - test('should handle null/undefined values', () => { + it('should handle null/undefined values', () => { expect( normalizeUrlValueForVariable( null, @@ -66,39 +66,39 @@ describe('normalizeUrlValueForVariable', () => { }); describe('Multi select variable', () => { - test('should convert string to array', () => { + it('should convert string to array', () => { const result = normalizeUrlValueForVariable( 'production', multiSelectVariable as IDashboardVariable, ); - expect(result).toEqual(['production']); + expect(result).toStrictEqual(['production']); }); - test('should convert number to array', () => { + it('should convert number to array', () => { const result = normalizeUrlValueForVariable( 123, multiSelectVariable as IDashboardVariable, ); - expect(result).toEqual([123]); + expect(result).toStrictEqual([123]); }); - test('should keep array as is', () => { + it('should keep array as is', () => { const result = normalizeUrlValueForVariable( ['production', 'staging'], multiSelectVariable as IDashboardVariable, ); - expect(result).toEqual(['production', 'staging']); + expect(result).toStrictEqual(['production', 'staging']); }); - test('should keep empty array as is', () => 
{ + it('should keep empty array as is', () => { const result = normalizeUrlValueForVariable( [], multiSelectVariable as IDashboardVariable, ); - expect(result).toEqual([]); + expect(result).toStrictEqual([]); }); - test('should handle null/undefined values', () => { + it('should handle null/undefined values', () => { expect( normalizeUrlValueForVariable( null, @@ -115,7 +115,7 @@ describe('normalizeUrlValueForVariable', () => { }); describe('Real world scenarios', () => { - test('URL has array ["test", "prod"] for single select variable -> should take "test"', () => { + it('URL has array ["test", "prod"] for single select variable -> should take "test"', () => { const result = normalizeUrlValueForVariable( ['test', 'prod'], singleSelectVariable as IDashboardVariable, @@ -123,15 +123,15 @@ describe('normalizeUrlValueForVariable', () => { expect(result).toBe('test'); }); - test('URL has string "test" for multi select variable -> should convert to ["test"]', () => { + it('URL has string "test" for multi select variable -> should convert to ["test"]', () => { const result = normalizeUrlValueForVariable( 'test', multiSelectVariable as IDashboardVariable, ); - expect(result).toEqual(['test']); + expect(result).toStrictEqual(['test']); }); - test('Handles mixed types in array for single select', () => { + it('Handles mixed types in array for single select', () => { const result = normalizeUrlValueForVariable( ['test', 123, true], singleSelectVariable as IDashboardVariable, diff --git a/frontend/src/providers/Dashboard/store/__tests__/variableFetchStore.test.ts b/frontend/src/providers/Dashboard/store/__tests__/variableFetchStore.test.ts index 4d93cd67e8..4666d9231e 100644 --- a/frontend/src/providers/Dashboard/store/__tests__/variableFetchStore.test.ts +++ b/frontend/src/providers/Dashboard/store/__tests__/variableFetchStore.test.ts @@ -63,7 +63,11 @@ describe('variableFetchStore', () => { initializeVariableFetchStore(['a', 'b', 'c']); const storeSnapshot = 
variableFetchStore.getSnapshot(); - expect(storeSnapshot.states).toEqual({ a: 'idle', b: 'idle', c: 'idle' }); + expect(storeSnapshot.states).toStrictEqual({ + a: 'idle', + b: 'idle', + c: 'idle', + }); }); it('should preserve existing states for known variables', () => { @@ -103,7 +107,7 @@ describe('variableFetchStore', () => { initializeVariableFetchStore([]); const storeSnapshot = variableFetchStore.getSnapshot(); - expect(storeSnapshot.states).toEqual({}); + expect(storeSnapshot.states).toStrictEqual({}); }); }); diff --git a/frontend/src/providers/Dashboard/store/dashboardVariables/__tests__/dashboardVariablesStore.test.ts b/frontend/src/providers/Dashboard/store/dashboardVariables/__tests__/dashboardVariablesStore.test.ts index 4b87ac8eaa..9ab900638b 100644 --- a/frontend/src/providers/Dashboard/store/dashboardVariables/__tests__/dashboardVariablesStore.test.ts +++ b/frontend/src/providers/Dashboard/store/dashboardVariables/__tests__/dashboardVariablesStore.test.ts @@ -50,8 +50,8 @@ describe('dashboardVariablesStore', () => { const storeSnapshot = dashboardVariablesStore.getSnapshot(); expect(storeSnapshot.dashboardId).toBe('dash-1'); - expect(storeSnapshot.variables).toEqual(variables); - expect(storeSnapshot.variableTypes).toEqual({ env: 'QUERY' }); + expect(storeSnapshot.variables).toStrictEqual(variables); + expect(storeSnapshot.variableTypes).toStrictEqual({ env: 'QUERY' }); expect(storeSnapshot.sortedVariablesArray).toHaveLength(1); }); }); @@ -76,11 +76,11 @@ describe('dashboardVariablesStore', () => { }); const storeSnapshot = dashboardVariablesStore.getSnapshot(); - expect(storeSnapshot.variableTypes).toEqual({ + expect(storeSnapshot.variableTypes).toStrictEqual({ env: 'QUERY', dyn1: 'DYNAMIC', }); - expect(storeSnapshot.dynamicVariableOrder).toEqual(['dyn1']); + expect(storeSnapshot.dynamicVariableOrder).toStrictEqual(['dyn1']); }); it('should replace dashboardId when it does not match', () => { @@ -100,10 +100,10 @@ 
describe('dashboardVariablesStore', () => { const storeSnapshot = dashboardVariablesStore.getSnapshot(); expect(storeSnapshot.dashboardId).toBe('dash-2'); - expect(storeSnapshot.variableTypes).toEqual({ + expect(storeSnapshot.variableTypes).toStrictEqual({ a: 'QUERY', }); - expect(storeSnapshot.variableTypes).not.toEqual({ + expect(storeSnapshot.variableTypes).not.toStrictEqual({ 'not-there': 'QUERY', }); }); @@ -126,8 +126,8 @@ describe('dashboardVariablesStore', () => { const { variableTypes, dynamicVariableOrder, dependencyData } = getVariableDependencyContext(); - expect(variableTypes).toEqual({ env: 'QUERY' }); - expect(dynamicVariableOrder).toEqual([]); + expect(variableTypes).toStrictEqual({ env: 'QUERY' }); + expect(dynamicVariableOrder).toStrictEqual([]); expect(dependencyData).not.toBeNull(); }); diff --git a/frontend/src/providers/Dashboard/store/dashboardVariables/__tests__/dashboardVariablesStoreUtils.test.ts b/frontend/src/providers/Dashboard/store/dashboardVariables/__tests__/dashboardVariablesStoreUtils.test.ts index 25ea8153ab..b72a71c385 100644 --- a/frontend/src/providers/Dashboard/store/dashboardVariables/__tests__/dashboardVariablesStoreUtils.test.ts +++ b/frontend/src/providers/Dashboard/store/dashboardVariables/__tests__/dashboardVariablesStoreUtils.test.ts @@ -33,12 +33,12 @@ describe('dashboardVariablesStoreUtils', () => { const result = buildSortedVariablesArray(variables); - expect(result.map((v) => v.name)).toEqual(['a', 'b', 'c']); + expect(result.map((v) => v.name)).toStrictEqual(['a', 'b', 'c']); }); it('should return empty array for empty variables', () => { const result = buildSortedVariablesArray({}); - expect(result).toEqual([]); + expect(result).toStrictEqual([]); }); it('should create copies of variables (not references)', () => { @@ -48,7 +48,7 @@ describe('dashboardVariablesStoreUtils', () => { const result = buildSortedVariablesArray(variables); expect(result[0]).not.toBe(original); - expect(result[0]).toEqual(original); + 
expect(result[0]).toStrictEqual(original); }); }); @@ -63,7 +63,7 @@ describe('dashboardVariablesStoreUtils', () => { const result = buildVariableTypesMap(sorted); - expect(result).toEqual({ + expect(result).toStrictEqual({ env: 'QUERY', region: 'CUSTOM', dynVar: 'DYNAMIC', @@ -72,7 +72,7 @@ describe('dashboardVariablesStoreUtils', () => { }); it('should return empty object for empty array', () => { - expect(buildVariableTypesMap([])).toEqual({}); + expect(buildVariableTypesMap([])).toStrictEqual({}); }); }); @@ -87,7 +87,7 @@ describe('dashboardVariablesStoreUtils', () => { const result = buildDynamicVariableOrder(sorted); - expect(result).toEqual(['dyn1', 'dyn2']); + expect(result).toStrictEqual(['dyn1', 'dyn2']); }); it('should return empty array when no DYNAMIC variables exist', () => { @@ -96,11 +96,11 @@ describe('dashboardVariablesStoreUtils', () => { createVariable({ name: 'b', type: 'CUSTOM' }), ]; - expect(buildDynamicVariableOrder(sorted)).toEqual([]); + expect(buildDynamicVariableOrder(sorted)).toStrictEqual([]); }); it('should return empty array for empty input', () => { - expect(buildDynamicVariableOrder([])).toEqual([]); + expect(buildDynamicVariableOrder([])).toStrictEqual([]); }); }); @@ -125,12 +125,12 @@ describe('dashboardVariablesStoreUtils', () => { expect(result.sortedVariablesArray[0].name).toBe('env'); expect(result.sortedVariablesArray[1].name).toBe('dyn1'); - expect(result.variableTypes).toEqual({ + expect(result.variableTypes).toStrictEqual({ env: 'QUERY', dyn1: 'DYNAMIC', }); - expect(result.dynamicVariableOrder).toEqual(['dyn1']); + expect(result.dynamicVariableOrder).toStrictEqual(['dyn1']); // dependencyData should exist since there are variables expect(result.dependencyData).not.toBeNull(); @@ -139,10 +139,10 @@ describe('dashboardVariablesStoreUtils', () => { it('should return null dependencyData for empty variables', () => { const result = computeDerivedValues({}); - expect(result.sortedVariablesArray).toEqual([]); + 
expect(result.sortedVariablesArray).toStrictEqual([]); expect(result.dependencyData).toBeNull(); - expect(result.variableTypes).toEqual({}); - expect(result.dynamicVariableOrder).toEqual([]); + expect(result.variableTypes).toStrictEqual({}); + expect(result.dynamicVariableOrder).toStrictEqual([]); }); it('should handle all four variable types together', () => { @@ -172,21 +172,21 @@ describe('dashboardVariablesStoreUtils', () => { const result = computeDerivedValues(variables); expect(result.sortedVariablesArray).toHaveLength(4); - expect(result.sortedVariablesArray.map((v) => v.name)).toEqual([ + expect(result.sortedVariablesArray.map((v) => v.name)).toStrictEqual([ 'queryVar', 'customVar', 'dynVar', 'textVar', ]); - expect(result.variableTypes).toEqual({ + expect(result.variableTypes).toStrictEqual({ queryVar: 'QUERY', customVar: 'CUSTOM', dynVar: 'DYNAMIC', textVar: 'TEXTBOX', }); - expect(result.dynamicVariableOrder).toEqual(['dynVar']); + expect(result.dynamicVariableOrder).toStrictEqual(['dynVar']); expect(result.dependencyData).not.toBeNull(); }); @@ -201,7 +201,7 @@ describe('dashboardVariablesStoreUtils', () => { const result = computeDerivedValues(variables); - expect(result.sortedVariablesArray.map((v) => v.name)).toEqual([ + expect(result.sortedVariablesArray.map((v) => v.name)).toStrictEqual([ 'a', 'b', 'm', @@ -221,7 +221,7 @@ describe('dashboardVariablesStoreUtils', () => { const result = computeDerivedValues(variables); - expect(result.dynamicVariableOrder).toEqual(['dyn1', 'dyn2', 'dyn3']); + expect(result.dynamicVariableOrder).toStrictEqual(['dyn1', 'dyn2', 'dyn3']); }); it('should build dependency data with query variable order for dependent queries', () => { @@ -316,7 +316,7 @@ describe('dashboardVariablesStoreUtils', () => { expect(depData).not.toBeNull(); expect(depData?.transitiveDescendants).toBeDefined(); // region's transitive descendants should include cluster and host - expect(depData?.transitiveDescendants['region']).toEqual( + 
expect(depData?.transitiveDescendants['region']).toStrictEqual( expect.arrayContaining(['cluster', 'host']), ); }); @@ -333,10 +333,10 @@ describe('dashboardVariablesStoreUtils', () => { const result = computeDerivedValues(variables); expect(result.sortedVariablesArray).toHaveLength(1); - expect(result.variableTypes).toEqual({ solo: 'QUERY' }); - expect(result.dynamicVariableOrder).toEqual([]); + expect(result.variableTypes).toStrictEqual({ solo: 'QUERY' }); + expect(result.dynamicVariableOrder).toStrictEqual([]); expect(result.dependencyData).not.toBeNull(); - expect(result.dependencyData?.order).toEqual(['solo']); + expect(result.dependencyData?.order).toStrictEqual(['solo']); }); it('should handle only non-QUERY variables', () => { @@ -362,8 +362,8 @@ describe('dashboardVariablesStoreUtils', () => { expect(result.sortedVariablesArray).toHaveLength(3); // No QUERY variables, so dependency order should be empty - expect(result.dependencyData?.order).toEqual([]); - expect(result.dynamicVariableOrder).toEqual(['dyn1']); + expect(result.dependencyData?.order).toStrictEqual([]); + expect(result.dynamicVariableOrder).toStrictEqual(['dyn1']); }); }); }); diff --git a/frontend/src/providers/preferences/__tests__/PreferencesProvider.integration.test.tsx b/frontend/src/providers/preferences/__tests__/PreferencesProvider.integration.test.tsx index 1d2811dbeb..e3b998c91d 100644 --- a/frontend/src/providers/preferences/__tests__/PreferencesProvider.integration.test.tsx +++ b/frontend/src/providers/preferences/__tests__/PreferencesProvider.integration.test.tsx @@ -126,7 +126,7 @@ describe('PreferencesProvider integration', () => { const stored = getLocalStorageJSON( LOCALSTORAGE.LOGS_LIST_OPTIONS, ); - expect(stored?.selectColumns).toEqual([ + expect(stored?.selectColumns).toStrictEqual([ defaultLogsSelectedColumns[0] as TelemetryFieldKey, ]); }); @@ -291,7 +291,7 @@ describe('PreferencesProvider integration', () => { const stored = getLocalStorageJSON( 
LOCALSTORAGE.TRACES_LIST_OPTIONS, ); - expect(stored?.selectColumns).toEqual([ + expect(stored?.selectColumns).toStrictEqual([ defaultTraceSelectedColumns[0] as TelemetryFieldKey, ]); }); diff --git a/frontend/src/providers/preferences/__tests__/logsLoaderConfig.test.ts b/frontend/src/providers/preferences/__tests__/logsLoaderConfig.test.ts index 8c0d549199..aba41b6110 100644 --- a/frontend/src/providers/preferences/__tests__/logsLoaderConfig.test.ts +++ b/frontend/src/providers/preferences/__tests__/logsLoaderConfig.test.ts @@ -63,7 +63,7 @@ describe('logsLoaderConfig', () => { }); it('should have priority order: local, url, default', () => { - expect(logsLoaderConfig.priority).toEqual(['local', 'url', 'default']); + expect(logsLoaderConfig.priority).toStrictEqual(['local', 'url', 'default']); }); it('should load from localStorage when available', async () => { @@ -86,7 +86,7 @@ describe('logsLoaderConfig', () => { const result = await logsLoaderConfig.local(); - expect(result).toEqual({ + expect(result).toStrictEqual({ columns: mockColumns, formatting: { maxLines: 10, @@ -103,7 +103,7 @@ describe('logsLoaderConfig', () => { const result = await logsLoaderConfig.local(); - expect(result).toEqual({ + expect(result).toStrictEqual({ columns: [] as BaseAutocompleteData[], formatting: undefined, }); @@ -131,7 +131,7 @@ describe('logsLoaderConfig', () => { const result = await logsLoaderConfig.url(); - expect(result).toEqual({ + expect(result).toStrictEqual({ columns: mockColumns, formatting: { maxLines: 5, @@ -148,7 +148,7 @@ describe('logsLoaderConfig', () => { const result = await logsLoaderConfig.url(); - expect(result).toEqual({ + expect(result).toStrictEqual({ columns: [] as BaseAutocompleteData[], formatting: undefined, }); @@ -157,7 +157,7 @@ describe('logsLoaderConfig', () => { it('should provide default values when no other source is available', async () => { const result = await logsLoaderConfig.default(); - expect(result).toEqual({ + 
expect(result).toStrictEqual({ columns: defaultLogsSelectedColumns, formatting: { maxLines: 1, @@ -181,7 +181,7 @@ describe('logsLoaderConfig', () => { const result = await logsLoaderConfig.url(); // Should only keep logs columns - expect(result.columns).toEqual(mockLogsColumns); + expect(result.columns).toStrictEqual(mockLogsColumns); }); it('should filter out Traces columns from localStorage', async () => { @@ -194,7 +194,7 @@ describe('logsLoaderConfig', () => { const result = await logsLoaderConfig.local(); // Should filter out all Traces columns - expect(result.columns).toEqual([]); + expect(result.columns).toStrictEqual([]); }); it('should accept valid Logs columns from URL', async () => { @@ -208,7 +208,7 @@ describe('logsLoaderConfig', () => { const result = await logsLoaderConfig.url(); - expect(result.columns).toEqual(logsColumns); + expect(result.columns).toStrictEqual(logsColumns); }); it('should fall back to defaults when all columns are filtered out from URL', async () => { @@ -223,7 +223,7 @@ describe('logsLoaderConfig', () => { const result = await logsLoaderConfig.url(); // Should return empty array, which triggers fallback to defaults in preferencesLoader - expect(result.columns).toEqual([]); + expect(result.columns).toStrictEqual([]); }); it('should handle columns without signal field (legacy data)', async () => { @@ -243,7 +243,7 @@ describe('logsLoaderConfig', () => { // Without signal field, columns pass through validation // This matches the current implementation behavior where only columns // with signal !== 'logs' are filtered out - expect(result.columns).toEqual(columnsWithoutSignal); + expect(result.columns).toStrictEqual(columnsWithoutSignal); }); }); }); diff --git a/frontend/src/providers/preferences/__tests__/logsUpdaterConfig.test.ts b/frontend/src/providers/preferences/__tests__/logsUpdaterConfig.test.ts index 7daed39685..1b8f072425 100644 --- a/frontend/src/providers/preferences/__tests__/logsUpdaterConfig.test.ts +++ 
b/frontend/src/providers/preferences/__tests__/logsUpdaterConfig.test.ts @@ -96,7 +96,7 @@ describe('logsUpdaterConfig', () => { const storedData = JSON.parse( mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS], ); - expect(storedData.selectColumns).toEqual(newColumns); + expect(storedData.selectColumns).toStrictEqual(newColumns); expect(storedData.maxLines).toBe(1); // Should preserve other fields // Should not update saved view preferences @@ -174,7 +174,7 @@ describe('logsUpdaterConfig', () => { expect(storedData.format).toBe('json'); expect(storedData.fontSize).toBe('large'); expect(storedData.version).toBe(1); - expect(storedData.selectColumns).toEqual([ + expect(storedData.selectColumns).toStrictEqual([ { key: 'column', type: 'tag', diff --git a/frontend/src/providers/preferences/__tests__/tracesLoaderConfig.test.ts b/frontend/src/providers/preferences/__tests__/tracesLoaderConfig.test.ts index 776e185656..f34cc80ca1 100644 --- a/frontend/src/providers/preferences/__tests__/tracesLoaderConfig.test.ts +++ b/frontend/src/providers/preferences/__tests__/tracesLoaderConfig.test.ts @@ -51,7 +51,11 @@ describe('tracesLoaderConfig', () => { }); it('should have priority order: local, url, default', () => { - expect(tracesLoaderConfig.priority).toEqual(['local', 'url', 'default']); + expect(tracesLoaderConfig.priority).toStrictEqual([ + 'local', + 'url', + 'default', + ]); }); it('should load from localStorage when available', async () => { @@ -69,7 +73,7 @@ describe('tracesLoaderConfig', () => { const result = await tracesLoaderConfig.local(); - expect(result).toEqual({ + expect(result).toStrictEqual({ columns: mockColumns, }); }); @@ -80,7 +84,7 @@ describe('tracesLoaderConfig', () => { const result = await tracesLoaderConfig.local(); - expect(result).toEqual({ + expect(result).toStrictEqual({ columns: [] as BaseAutocompleteData[], }); }); @@ -103,7 +107,7 @@ describe('tracesLoaderConfig', () => { const result = await tracesLoaderConfig.url(); - 
expect(result).toEqual({ + expect(result).toStrictEqual({ columns: mockColumns, }); }); @@ -114,7 +118,7 @@ describe('tracesLoaderConfig', () => { const result = await tracesLoaderConfig.url(); - expect(result).toEqual({ + expect(result).toStrictEqual({ columns: [] as BaseAutocompleteData[], }); }); @@ -122,7 +126,7 @@ describe('tracesLoaderConfig', () => { it('should provide default values when no other source is available', async () => { const result = await tracesLoaderConfig.default(); - expect(result).toEqual({ + expect(result).toStrictEqual({ columns: defaultTraceSelectedColumns as TelemetryFieldKey[], }); }); @@ -143,7 +147,7 @@ describe('tracesLoaderConfig', () => { const result = await tracesLoaderConfig.url(); // Should filter out all Logs columns - expect(result.columns).toEqual([]); + expect(result.columns).toStrictEqual([]); }); it('should filter out Logs columns (timestamp with logs signal) from URL', async () => { @@ -161,7 +165,7 @@ describe('tracesLoaderConfig', () => { const result = await tracesLoaderConfig.url(); // Should only keep trace columns - expect(result.columns).toEqual([ + expect(result.columns).toStrictEqual([ { name: 'service.name', signal: 'traces', fieldContext: 'resource' }, ]); }); @@ -179,7 +183,7 @@ describe('tracesLoaderConfig', () => { const result = await tracesLoaderConfig.local(); // Should filter out all Logs columns - expect(result.columns).toEqual([]); + expect(result.columns).toStrictEqual([]); }); it('should accept valid Trace columns from URL', async () => { @@ -196,7 +200,7 @@ describe('tracesLoaderConfig', () => { const result = await tracesLoaderConfig.url(); - expect(result.columns).toEqual(traceColumns); + expect(result.columns).toStrictEqual(traceColumns); }); it('should fall back to defaults when all columns are filtered out from URL', async () => { @@ -211,7 +215,7 @@ describe('tracesLoaderConfig', () => { const result = await tracesLoaderConfig.url(); // Should return empty array, which triggers fallback to 
defaults in preferencesLoader - expect(result.columns).toEqual([]); + expect(result.columns).toStrictEqual([]); }); it('should handle columns without signal field (legacy data)', async () => { @@ -231,7 +235,7 @@ describe('tracesLoaderConfig', () => { // Without signal field, columns pass through validation // This matches the current implementation behavior where only columns // with signal !== 'traces' are filtered out - expect(result.columns).toEqual(columnsWithoutSignal); + expect(result.columns).toStrictEqual(columnsWithoutSignal); }); }); }); diff --git a/frontend/src/providers/preferences/__tests__/usePreferenceLoader.test.tsx b/frontend/src/providers/preferences/__tests__/usePreferenceLoader.test.tsx index 5865e2fa12..30870e84cb 100644 --- a/frontend/src/providers/preferences/__tests__/usePreferenceLoader.test.tsx +++ b/frontend/src/providers/preferences/__tests__/usePreferenceLoader.test.tsx @@ -61,11 +61,11 @@ describe('usePreferenceLoader', () => { }); // Should have loaded from local storage (highest priority) - expect(result.current.preferences).toEqual({ + expect(result.current.preferences).toStrictEqual({ columns: [{ name: 'local-column' }], formatting: { maxLines: 5, format: 'table', fontSize: 'medium', version: 1 }, }); - expect(result.current.error).toBe(null); + expect(result.current.error).toBeNull(); expect(setReSync).not.toHaveBeenCalled(); // Should not call setReSync when reSync is false }); @@ -85,7 +85,7 @@ describe('usePreferenceLoader', () => { }); // Should have loaded trace columns - expect(result.current.preferences).toEqual({ + expect(result.current.preferences).toStrictEqual({ columns: [{ name: 'local-trace-column' }], }); expect(setReSync).not.toHaveBeenCalled(); // Should not call setReSync when reSync is false diff --git a/frontend/src/store/globalTime/__tests__/utils.test.ts b/frontend/src/store/globalTime/__tests__/utils.test.ts index 763ccb1be4..3fc187d649 100644 --- a/frontend/src/store/globalTime/__tests__/utils.test.ts +++ 
b/frontend/src/store/globalTime/__tests__/utils.test.ts @@ -59,7 +59,7 @@ describe('globalTime/utils', () => { const maxTime = 2000000000; const timeString = `${minTime}${CUSTOM_TIME_SEPARATOR}${maxTime}`; const result = parseCustomTimeRange(timeString); - expect(result).toEqual({ minTime, maxTime }); + expect(result).toStrictEqual({ minTime, maxTime }); }); it('should return null for non-custom time range strings', () => { @@ -75,7 +75,7 @@ describe('globalTime/utils', () => { it('should handle zero values', () => { const result = parseCustomTimeRange(`0${CUSTOM_TIME_SEPARATOR}0`); - expect(result).toEqual({ minTime: 0, maxTime: 0 }); + expect(result).toStrictEqual({ minTime: 0, maxTime: 0 }); }); }); @@ -94,7 +94,7 @@ describe('globalTime/utils', () => { const maxTime = 2000000000; const timeString = createCustomTimeRange(minTime, maxTime); const result = parseSelectedTime(timeString); - expect(result).toEqual({ minTime, maxTime }); + expect(result).toStrictEqual({ minTime, maxTime }); }); it('should return fallback for invalid custom time range', () => { diff --git a/frontend/src/tests/mapQueryDataFromApi.test.ts b/frontend/src/tests/mapQueryDataFromApi.test.ts index 568c58ffbe..9519cb2a29 100644 --- a/frontend/src/tests/mapQueryDataFromApi.test.ts +++ b/frontend/src/tests/mapQueryDataFromApi.test.ts @@ -73,8 +73,8 @@ describe('mapQueryDataFromApi', (): void => { const result = mapQueryDataFromApi(compositeQuery); expect(result.queryType).toBe(EQueryType.QUERY_BUILDER); - expect(result.promql).toEqual([]); - expect(result.clickhouse_sql).toEqual([]); + expect(result.promql).toStrictEqual([]); + expect(result.clickhouse_sql).toStrictEqual([]); // Expect one builder query mapped and merged using base query fields expect(result.builder.queryData).toHaveLength(1); @@ -84,17 +84,17 @@ describe('mapQueryDataFromApi', (): void => { expect(q.dataSource).toBe(DataSource.TRACES); expect(q.stepInterval).toBe(60); // filter overridden from V5 spec (no trailing space) - 
expect(q.filter).toEqual({ expression: "service.name = 'adservice'" }); + expect(q.filter).toStrictEqual({ expression: "service.name = 'adservice'" }); // having overridden from V5 spec expect((q.having as unknown as { expression: string }).expression).toBe( 'avg(app.ads.count) != 0', ); // orderBy preserved from base - expect(q.orderBy).toEqual([ + expect(q.orderBy).toStrictEqual([ { columnName: 'avg(app.ads.count)', order: 'asc' }, ]); // groupBy converted from V5 spec - expect(q.groupBy).toEqual([ + expect(q.groupBy).toStrictEqual([ { key: 'service.name', dataType: DataTypes.String, @@ -103,7 +103,7 @@ describe('mapQueryDataFromApi', (): void => { }, ]); // aggregations replaced with array from V5 spec - expect(q.aggregations).toEqual([ + expect(q.aggregations).toStrictEqual([ { expression: 'count()' }, { expression: 'avg(app.ads.count)', alias: 'avtv' }, ]); diff --git a/frontend/src/utils/__tests__/queryContextUtils.test.ts b/frontend/src/utils/__tests__/queryContextUtils.test.ts index e96b53a629..a46ec59b36 100644 --- a/frontend/src/utils/__tests__/queryContextUtils.test.ts +++ b/frontend/src/utils/__tests__/queryContextUtils.test.ts @@ -10,12 +10,12 @@ describe('extractQueryPairs', () => { jest.clearAllMocks(); }); - test('should extract NOT EXISTS and NOT LIKE correctly', () => { + it('should extract NOT EXISTS and NOT LIKE correctly', () => { const input = "active NOT EXISTS AND name NOT LIKE '%tmp%'"; const result = extractQueryPairs(input); - expect(result).toEqual([ + expect(result).toStrictEqual([ { key: 'active', operator: 'EXISTS', @@ -59,10 +59,10 @@ describe('extractQueryPairs', () => { ]); }); - test('should test for filter expression with freeText', () => { + it('should test for filter expression with freeText', () => { const input = "disconnected deployment.env not in ['mq-kafka']"; const result = extractQueryPairs(input); - expect(result).toEqual([ + expect(result).toStrictEqual([ { key: 'disconnected', operator: '', @@ -111,10 +111,10 @@ 
describe('extractQueryPairs', () => { ]); }); - test('should extract IN with numeric list inside parentheses', () => { + it('should extract IN with numeric list inside parentheses', () => { const input = 'id IN (1, 2, 3)'; const result = extractQueryPairs(input); - expect(result).toEqual([ + expect(result).toStrictEqual([ expect.objectContaining({ key: 'id', operator: 'IN', @@ -131,10 +131,10 @@ describe('extractQueryPairs', () => { ]); }); - test('should handle extra whitespace and separators in IN lists', () => { + it('should handle extra whitespace and separators in IN lists', () => { const input = "label IN [ 'a' , 'b' , 'c' ]"; const result = extractQueryPairs(input); - expect(result).toEqual([ + expect(result).toStrictEqual([ expect.objectContaining({ key: 'label', operator: 'IN', @@ -151,10 +151,10 @@ describe('extractQueryPairs', () => { ]); }); - test('should extract correct query pairs when the query has space at the start of the value', () => { + it('should extract correct query pairs when the query has space at the start of the value', () => { const input = " label IN [ 'a' , 'b' , 'c' ]"; const result = extractQueryPairs(input); - expect(result).toEqual([ + expect(result).toStrictEqual([ expect.objectContaining({ key: 'label', operator: 'IN', @@ -171,10 +171,10 @@ describe('extractQueryPairs', () => { ]); }); - test('should return incomplete pair when value is missing', () => { + it('should return incomplete pair when value is missing', () => { const input = 'a ='; const result = extractQueryPairs(input); - expect(result).toEqual([ + expect(result).toStrictEqual([ expect.objectContaining({ key: 'a', operator: '=', @@ -184,10 +184,10 @@ describe('extractQueryPairs', () => { ]); }); - test('should parse pairs within grouping parentheses with conjunctions', () => { + it('should parse pairs within grouping parentheses with conjunctions', () => { const input = "(name = 'x' AND age > 10) OR active EXISTS"; const result = extractQueryPairs(input); - 
expect(result).toEqual([ + expect(result).toStrictEqual([ expect.objectContaining({ key: 'name', operator: '=', @@ -209,13 +209,13 @@ describe('extractQueryPairs', () => { ]); }); - test('should extract query pairs from complex query with IN operator and multiple conditions', () => { + it('should extract query pairs from complex query with IN operator and multiple conditions', () => { const input = "service.name IN ['adservice', 'consumer-svc-1'] AND cloud.account.id = 'signoz-staging' code.lineno < 172"; const result = extractQueryPairs(input); - expect(result).toEqual([ + expect(result).toStrictEqual([ { key: 'service.name', operator: 'IN', @@ -288,12 +288,12 @@ describe('extractQueryPairs', () => { ]); }); - test('should extract query pairs from complex query with IN operator without brackets', () => { + it('should extract query pairs from complex query with IN operator without brackets', () => { const input = "service.name IN 'adservice' AND cloud.account.id = 'signoz-staging' code.lineno < 172"; const result = extractQueryPairs(input); - expect(result).toEqual([ + expect(result).toStrictEqual([ { key: 'service.name', operator: 'IN', @@ -362,7 +362,7 @@ describe('extractQueryPairs', () => { ]); }); - test('should handle recursion guard', () => { + it('should handle recursion guard', () => { // This test verifies the recursion protection in the function // We'll mock the function to simulate recursion @@ -379,7 +379,7 @@ describe('extractQueryPairs', () => { consoleSpy.mockRestore(); }); - test('should treat lowercase exists as non-value operator', () => { + it('should treat lowercase exists as non-value operator', () => { const input = 'body exists service.name contains "test"'; const result = extractQueryPairs(input); @@ -387,18 +387,18 @@ describe('extractQueryPairs', () => { expect(result[0].key).toBe('body'); expect(result[0].operator).toBe('exists'); expect(result[0].value).toBeUndefined(); - expect(result[0].valuesPosition).toEqual([]); + 
expect(result[0].valuesPosition).toStrictEqual([]); expect(result[0].isComplete).toBe(false); expect(result[1].key).toBe('service.name'); expect(result[1].operator).toBe('contains'); expect(result[1].value).toBe('"test"'); - expect(result[1].valuesPosition).toEqual([]); + expect(result[1].valuesPosition).toStrictEqual([]); expect(result[1].isComplete).toBe(true); }); }); describe('createContext', () => { - test('should create a context object with all parameters', () => { + it('should create a context object with all parameters', () => { const mockToken = { type: 29, text: 'test', @@ -419,7 +419,7 @@ describe('createContext', () => { null, // currentPair ); - expect(result).toEqual({ + expect(result).toStrictEqual({ tokenType: 29, text: 'test', start: 0, @@ -440,7 +440,7 @@ describe('createContext', () => { }); }); - test('should create a context object with minimal parameters', () => { + it('should create a context object with minimal parameters', () => { const mockToken = { type: 29, text: 'test', @@ -450,7 +450,7 @@ describe('createContext', () => { const result = createContext(mockToken as any, false, false, false, false); - expect(result).toEqual({ + expect(result).toStrictEqual({ tokenType: 29, text: 'test', start: 0, @@ -473,7 +473,7 @@ describe('createContext', () => { }); describe('getCurrentValueIndexAtCursor', () => { - test('should return correct value index when cursor is within a value range', () => { + it('should return correct value index when cursor is within a value range', () => { const valuesPosition = [ { start: 0, end: 10 }, { start: 15, end: 25 }, @@ -485,7 +485,7 @@ describe('getCurrentValueIndexAtCursor', () => { expect(result).toBe(1); }); - test('should return null when cursor is not within any value range', () => { + it('should return null when cursor is not within any value range', () => { const valuesPosition = [ { start: 0, end: 10 }, { start: 15, end: 25 }, @@ -496,7 +496,7 @@ describe('getCurrentValueIndexAtCursor', () => { 
expect(result).toBeNull(); }); - test('should return correct index when cursor is at the boundary', () => { + it('should return correct index when cursor is at the boundary', () => { const valuesPosition = [ { start: 0, end: 10 }, { start: 15, end: 25 }, @@ -507,7 +507,7 @@ describe('getCurrentValueIndexAtCursor', () => { expect(result).toBe(0); }); - test('should return null for empty valuesPosition array', () => { + it('should return null for empty valuesPosition array', () => { const result = getCurrentValueIndexAtCursor([], 5); expect(result).toBeNull(); @@ -515,7 +515,7 @@ describe('getCurrentValueIndexAtCursor', () => { }); describe('getCurrentQueryPair', () => { - test('should return the correct query pair at cursor position', () => { + it('should return the correct query pair at cursor position', () => { const queryPairs = [ { key: 'a', @@ -550,10 +550,10 @@ describe('getCurrentQueryPair', () => { const query = 'a = 1 AND b = 2'; const result = getCurrentQueryPair(queryPairs, query, 15); - expect(result).toEqual(queryPairs[1]); + expect(result).toStrictEqual(queryPairs[1]); }); - test('should return null when no pairs match cursor position', () => { + it('should return null when no pairs match cursor position', () => { const queryPairs = [ { key: 'a', @@ -578,13 +578,13 @@ describe('getCurrentQueryPair', () => { expect(result).toBeNull(); }); - test('should return null for empty queryPairs array', () => { + it('should return null for empty queryPairs array', () => { const result = getCurrentQueryPair([], 'test query', 5); expect(result).toBeNull(); }); - test('should return last pair when cursor is at the end', () => { + it('should return last pair when cursor is at the end', () => { const queryPairs = [ { key: 'a', @@ -605,6 +605,6 @@ describe('getCurrentQueryPair', () => { const query = 'a = 1'; const result = getCurrentQueryPair(queryPairs, query, 5); - expect(result).toEqual(queryPairs[0]); + expect(result).toStrictEqual(queryPairs[0]); }); }); diff 
--git a/frontend/src/utils/__tests__/sanitizeOrderBy.test.ts b/frontend/src/utils/__tests__/sanitizeOrderBy.test.ts index d53c7346d0..1ab6c4fd13 100644 --- a/frontend/src/utils/__tests__/sanitizeOrderBy.test.ts +++ b/frontend/src/utils/__tests__/sanitizeOrderBy.test.ts @@ -88,7 +88,7 @@ describe('sanitizeOrderByForExplorer', () => { const result = sanitizeOrderByForExplorer(query); - expect(result).toEqual([ + expect(result).toStrictEqual([ { columnName: 'service.name', order: 'asc' }, { columnName: 'count()', order: 'desc' }, { columnName: 'avg(duration)', order: 'asc' }, @@ -116,7 +116,7 @@ describe('sanitizeOrderByForExplorer', () => { }); const result = sanitizeOrderByForExplorer(query); - expect(result).toEqual([]); + expect(result).toStrictEqual([]); }); it('handles missing orderBy by returning an empty array', () => { @@ -124,6 +124,6 @@ describe('sanitizeOrderByForExplorer', () => { const query = buildQuery({ orderBy: [] }); const result = sanitizeOrderByForExplorer(query); - expect(result).toEqual([]); + expect(result).toStrictEqual([]); }); }); diff --git a/frontend/src/utils/__tests__/spanToTree.test.ts b/frontend/src/utils/__tests__/spanToTree.test.ts index 4cf7a20fb4..5dc3163be4 100644 --- a/frontend/src/utils/__tests__/spanToTree.test.ts +++ b/frontend/src/utils/__tests__/spanToTree.test.ts @@ -2,20 +2,20 @@ import { TraceData } from '../fixtures/TraceData'; import { spanToTreeUtil } from '../spanToTree'; describe('utils/spanToTree', () => { - test('should return a single tree on valid trace data', () => { + it('should return a single tree on valid trace data', () => { const spanTree = spanToTreeUtil(TraceData); - expect(spanTree.spanTree.length).toBe(1); - expect(spanTree.missingSpanTree.length).toBe(0); + expect(spanTree.spanTree).toHaveLength(1); + expect(spanTree.missingSpanTree).toHaveLength(0); expect(spanTree).toMatchSnapshot(); }); - test('should return a single tree on valid trace data', () => { + it('should return a single tree on valid 
trace data', () => { const MissingTraceData = [...TraceData]; MissingTraceData.splice(1, 1); const spanTree = spanToTreeUtil(MissingTraceData); - expect(spanTree.spanTree.length).toBe(1); - expect(spanTree.missingSpanTree.length).toBe(1); + expect(spanTree.spanTree).toHaveLength(1); + expect(spanTree.missingSpanTree).toHaveLength(1); expect(spanTree).toMatchSnapshot(); }); }); diff --git a/frontend/src/utils/timeUtils.ts b/frontend/src/utils/timeUtils.ts index 60133ac7d0..ce92600ad0 100644 --- a/frontend/src/utils/timeUtils.ts +++ b/frontend/src/utils/timeUtils.ts @@ -37,7 +37,7 @@ export const getRemainingDays = (billingEndDate: number): number => { const endDate = new Date(billingEndDate * 1000); // Convert seconds to milliseconds // Calculate the time difference in milliseconds - // @ts-ignore + // @ts-expect-error const timeDifference = endDate - startDate; return Math.ceil(timeDifference / (1000 * 60 * 60 * 24)); diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json index 613758fa26..1e6ea35293 100644 --- a/frontend/tsconfig.json +++ b/frontend/tsconfig.json @@ -34,7 +34,6 @@ "./src/*" ] }, - "downlevelIteration": true, "plugins": [ { "name": "typescript-plugin-css-modules" diff --git a/frontend/yarn.lock b/frontend/yarn.lock index b3337eb796..d4efb75805 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -4216,225 +4216,225 @@ resolved "https://registry.yarnpkg.com/@oxc-project/types/-/types-0.101.0.tgz#5692200d09d6f87341eac3f8e70e403173c5283e" integrity sha512-nuFhqlUzJX+gVIPPfuE6xurd4lST3mdcWOhyK/rZO0B9XWMKm79SuszIQEnSMmmDhq1DC8WWVYGVd+6F93o1gQ== -"@oxfmt/binding-android-arm-eabi@0.46.0": - version "0.46.0" - resolved "https://registry.yarnpkg.com/@oxfmt/binding-android-arm-eabi/-/binding-android-arm-eabi-0.46.0.tgz#d2da704ab6c741e535de3445d0994290b77b3fc0" - integrity sha512-b1doV4WRcJU+BESSlCvCjV+5CEr/T6h0frArAdV26Nir+gGNFNaylvDiiMPfF1pxeV0txZEs38ojzJaxBYg+ng== +"@oxfmt/binding-android-arm-eabi@0.47.0": + version "0.47.0" + resolved 
"https://registry.yarnpkg.com/@oxfmt/binding-android-arm-eabi/-/binding-android-arm-eabi-0.47.0.tgz#5516a2c695e6d45829b2c98a01d9528b0b6e61a0" + integrity sha512-KrMQRdMi/upr81qT4ijK6X6BNp6jqpMY7FwILQnwIy9QLc3qpnhUx5rsCLGzn4ewsCQ0CNAspN2ogmP1GXLyLw== -"@oxfmt/binding-android-arm64@0.46.0": - version "0.46.0" - resolved "https://registry.yarnpkg.com/@oxfmt/binding-android-arm64/-/binding-android-arm64-0.46.0.tgz#203341eaf489c325a01e247c6209f13f39f0777f" - integrity sha512-v6+HhjsoV3GO0u2u9jLSAZrvWfTraDxKofUIQ7/ktS7tzS+epVsxdHmeM+XxuNcAY/nWxxU1Sg4JcGTNRXraBA== +"@oxfmt/binding-android-arm64@0.47.0": + version "0.47.0" + resolved "https://registry.yarnpkg.com/@oxfmt/binding-android-arm64/-/binding-android-arm64-0.47.0.tgz#85ea358f46a73f2485343fe12c5223fdb50c069d" + integrity sha512-r4ixS/PeUpAFKgrpDoZ5pSkthjZzVzKd95525Aazj+aOv9H4ulK5zYHGb7wFY5n5kZxHK8TbOJUZgoEb1ohddQ== -"@oxfmt/binding-darwin-arm64@0.46.0": - version "0.46.0" - resolved "https://registry.yarnpkg.com/@oxfmt/binding-darwin-arm64/-/binding-darwin-arm64-0.46.0.tgz#ce653c87e00305c84edb380ecac3db789c3ea9a4" - integrity sha512-3eeooJGrqGIlI5MyryDZsAcKXSmKIgAD4yYtfRrRJzXZ0UTFZtiSveIur56YPrGMYZwT4XyVhHsMqrNwr1XeFA== +"@oxfmt/binding-darwin-arm64@0.47.0": + version "0.47.0" + resolved "https://registry.yarnpkg.com/@oxfmt/binding-darwin-arm64/-/binding-darwin-arm64-0.47.0.tgz#90c757ac8d87b4ede92965aec61eac94e47ff6dd" + integrity sha512-CLWxiKpMl+195cm09CuaWEhJK0CirRkoMa07aR9+9AFPat2LfIKtwx1JqxZM0MTvcMe6+adlJNdVL6jdInvq3g== -"@oxfmt/binding-darwin-x64@0.46.0": - version "0.46.0" - resolved "https://registry.yarnpkg.com/@oxfmt/binding-darwin-x64/-/binding-darwin-x64-0.46.0.tgz#71016678036fab249172c763d2f59a3535f0fe91" - integrity sha512-QG8BDM0CXWbu84k2SKmCqfEddPQPFiBicwtYnLqHRWZZl57HbtOLRMac/KTq2NO4AEc4ICCBpFxJIV9zcqYfkQ== +"@oxfmt/binding-darwin-x64@0.47.0": + version "0.47.0" + resolved 
"https://registry.yarnpkg.com/@oxfmt/binding-darwin-x64/-/binding-darwin-x64-0.47.0.tgz#1ebcf47ed790e31441e73ecaeee441f6ab0fe0fb" + integrity sha512-Xq5fjTYDC50faUeLSm0rZdBqoTgleXEdD7NpJdARtQIczkCJn3xNjMUSQQkUmh4CtxkKTNL68lytcOK3e/osgg== -"@oxfmt/binding-freebsd-x64@0.46.0": - version "0.46.0" - resolved "https://registry.yarnpkg.com/@oxfmt/binding-freebsd-x64/-/binding-freebsd-x64-0.46.0.tgz#fd939be2e39c29fead415f41c70eb52f94d53d6a" - integrity sha512-9DdCqS/n2ncu/Chazvt3cpgAjAmIGQDz7hFKSrNItMApyV/Ja9mz3hD4JakIE3nS8PW9smEbPWnb389QLBY4nw== +"@oxfmt/binding-freebsd-x64@0.47.0": + version "0.47.0" + resolved "https://registry.yarnpkg.com/@oxfmt/binding-freebsd-x64/-/binding-freebsd-x64-0.47.0.tgz#99eaea7abd9ba897b697eabcffc1c475e78cb6b7" + integrity sha512-QOU9ZIJ52p5askcEC0QJvvr8trHAWoonul8bgISo6gYUL3s50zkqafBYcNAr9LJZQbsZtPfIWHk9+5+nUp1qJQ== -"@oxfmt/binding-linux-arm-gnueabihf@0.46.0": - version "0.46.0" - resolved "https://registry.yarnpkg.com/@oxfmt/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-0.46.0.tgz#09f4f3894e140340256a02665832bdcf66d8b5c3" - integrity sha512-Dgs7VeE2jT0LHMhw6tPEt0xQYe54kBqHEovmWsv4FVQlegCOvlIJNx0S8n4vj8WUtpT+Z6BD2HhKJPLglLxvZg== +"@oxfmt/binding-linux-arm-gnueabihf@0.47.0": + version "0.47.0" + resolved "https://registry.yarnpkg.com/@oxfmt/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-0.47.0.tgz#9166b54e49e3311f1337ed085ddea7884def2762" + integrity sha512-oJxDM1aBhPvz9gmElBv8UpxyiqhwfjcbrSxT5F0xtuUzY6dQI27/AQPIt3eu3Z5Yvn0kQl5R7MA3Z+MbnRvCBw== -"@oxfmt/binding-linux-arm-musleabihf@0.46.0": - version "0.46.0" - resolved "https://registry.yarnpkg.com/@oxfmt/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-0.46.0.tgz#35ca79c2c643dd5dd7eb1e1c0cbc5205eecba8ee" - integrity sha512-Zxn3adhTH13JKnU4xXJj8FeEfF680XjXh3gSShKl57HCMBRde2tUJTgogV/1MSHA80PJEVrDa7r66TLVq3Ia7Q== +"@oxfmt/binding-linux-arm-musleabihf@0.47.0": + version "0.47.0" + resolved 
"https://registry.yarnpkg.com/@oxfmt/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-0.47.0.tgz#665bd22db9a926533e1c22354a6f516a1151ecac" + integrity sha512-g8Lh50VS4ibGz2q6v7r9UZY4D0dM16SdrFYOMzhqIoCwGcai8VMIRUAcqn1/jlCsOOzUXJ741+kCeJt0cofakQ== -"@oxfmt/binding-linux-arm64-gnu@0.46.0": - version "0.46.0" - resolved "https://registry.yarnpkg.com/@oxfmt/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-0.46.0.tgz#5f30f09b2d109ec18f1d9de64414718f4c9975be" - integrity sha512-+TWipjrgVM8D7aIdDD0tlr3teLTTvQTn7QTE5BpT10H1Fj82gfdn9X6nn2sDgx/MepuSCfSnzFNJq2paLL0OiA== +"@oxfmt/binding-linux-arm64-gnu@0.47.0": + version "0.47.0" + resolved "https://registry.yarnpkg.com/@oxfmt/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-0.47.0.tgz#b6f1a0c40557397ec4ad79727bae16636c76c411" + integrity sha512-YrNT1vQ0asaXoRbrvYENPqmBfOQ9Xr8enPNOULeYfg44VjCcrUowFy5QZr+WawE0zyP8cH9e9Gxxg0fDEFzhcg== -"@oxfmt/binding-linux-arm64-musl@0.46.0": - version "0.46.0" - resolved "https://registry.yarnpkg.com/@oxfmt/binding-linux-arm64-musl/-/binding-linux-arm64-musl-0.46.0.tgz#a550718469f40dcb8a382eb5a89b031d7f47ee16" - integrity sha512-aAUPBWJ1lGwwnxZUEDLJ94+Iy6MuwJwPxUgO4sCA5mEEyDk7b+cDQ+JpX1VR150Zoyd+D49gsrUzpUK5h587Eg== +"@oxfmt/binding-linux-arm64-musl@0.47.0": + version "0.47.0" + resolved "https://registry.yarnpkg.com/@oxfmt/binding-linux-arm64-musl/-/binding-linux-arm64-musl-0.47.0.tgz#9617da44d91b4bb0a679ec1504eb0aa0c919fbb1" + integrity sha512-IxtQC/sbBi4ubbY+MdwdanRWrG9InQJVZqyMsBa5IUaQcnSg86gQme574HxXMC1p4bo4YhV99zQ+wNnGCvEgzw== -"@oxfmt/binding-linux-ppc64-gnu@0.46.0": - version "0.46.0" - resolved "https://registry.yarnpkg.com/@oxfmt/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-0.46.0.tgz#51d25e82d6c965aa09660ef31a19e6d8a79253a9" - integrity sha512-ufBCJukyFX/UDrokP/r6BGDoTInnsDs7bxyzKAgMiZlt2Qu8GPJSJ6Zm6whIiJzKk0naxA8ilwmbO1LMw6Htxw== +"@oxfmt/binding-linux-ppc64-gnu@0.47.0": + version "0.47.0" + resolved 
"https://registry.yarnpkg.com/@oxfmt/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-0.47.0.tgz#6e672e383178bd0a90b5685d9e513d60a66ba818" + integrity sha512-EWXEhOMbWO0q6eJSbu0QLkU8cKi0ljlYLngeDs2Ocu/pm1rrLwyQiYzlFbdnMRURI4w9ndr1sI9rSbhlJ5o23Q== -"@oxfmt/binding-linux-riscv64-gnu@0.46.0": - version "0.46.0" - resolved "https://registry.yarnpkg.com/@oxfmt/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-0.46.0.tgz#02565fd79a18de150f9a76a00f76fdd3a5e7f0fa" - integrity sha512-eqtlC2YmPqjun76R1gVfGLuKWx7NuEnLEAudZ7n6ipSKbCZTqIKSs1b5Y8K/JHZsRpLkeSmAAjig5HOIg8fQzQ== +"@oxfmt/binding-linux-riscv64-gnu@0.47.0": + version "0.47.0" + resolved "https://registry.yarnpkg.com/@oxfmt/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-0.47.0.tgz#68563084137eb3ca353af0d55739fdbf49a667a0" + integrity sha512-tZrjS11TUiDuEpRaqdk8K9F9xETRyKXfuZKmdeW+Gj7coBnm7+8sBEfyt033EAFEQSlkniAXvBLh+Qja2ioGBQ== -"@oxfmt/binding-linux-riscv64-musl@0.46.0": - version "0.46.0" - resolved "https://registry.yarnpkg.com/@oxfmt/binding-linux-riscv64-musl/-/binding-linux-riscv64-musl-0.46.0.tgz#d148eb5b52a0c96ce90361ed733443df2f03e5c5" - integrity sha512-yccVOO2nMXkQLGgy0He3EQEwKD7NF0zEk+/OWmroznkqXyJdN6bfK0LtNnr6/14Bh3FjpYq7bP33l/VloCnxpA== +"@oxfmt/binding-linux-riscv64-musl@0.47.0": + version "0.47.0" + resolved "https://registry.yarnpkg.com/@oxfmt/binding-linux-riscv64-musl/-/binding-linux-riscv64-musl-0.47.0.tgz#6a2f0d5f159d86e16691753a49c7e78027e8e568" + integrity sha512-KBFy+2CFKUCZzYwX2ZOPQKck1vjQbz+hextuc19G4r0WRJwadfAeuQMQRQvB+Ivc8brlbOVg7et8K7E467440g== -"@oxfmt/binding-linux-s390x-gnu@0.46.0": - version "0.46.0" - resolved "https://registry.yarnpkg.com/@oxfmt/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-0.46.0.tgz#22cbee991e79147e7a0d5db2cc9aa74bf5bd6d89" - integrity sha512-aAf7fG23OQCey6VRPj9IeCraoYtpgtx0ZyJ1CXkPyT1wjzBE7c3xtuxHe/AdHaJfVVb/SXpSk8Gl1LzyQupSqw== +"@oxfmt/binding-linux-s390x-gnu@0.47.0": + version "0.47.0" + resolved 
"https://registry.yarnpkg.com/@oxfmt/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-0.47.0.tgz#888c1e9db152c68e51ebe3c4b21a8c2fc5331f07" + integrity sha512-REUPFKVGSiK99B+9eaPhluEVglzaoj/SMykNC5SUiV2RSsBfV5lWN7Y0iCIc251Wz3GaeAGZsJ/zj3gjarxdFg== -"@oxfmt/binding-linux-x64-gnu@0.46.0": - version "0.46.0" - resolved "https://registry.yarnpkg.com/@oxfmt/binding-linux-x64-gnu/-/binding-linux-x64-gnu-0.46.0.tgz#35fdda5137b108ec5cc0a8656a04fabdd0edd652" - integrity sha512-q0JPsTMyJNjYrBvYFDz4WbVsafNZaPCZv4RnFypRotLqpKROtBZcEaXQW4eb9YmvLU3NckVemLJnzkSZSdmOxw== +"@oxfmt/binding-linux-x64-gnu@0.47.0": + version "0.47.0" + resolved "https://registry.yarnpkg.com/@oxfmt/binding-linux-x64-gnu/-/binding-linux-x64-gnu-0.47.0.tgz#8a279adaa8a404acc661b5ba915cc72ac6e15626" + integrity sha512-KVftVSVEDeIfRW3TIeLe3aNI/iY4m1fu5mDwHcisKMZSCMKLkrhFsjowC7o9RoqNPxbbglm2+/6KAKBIts2t0Q== -"@oxfmt/binding-linux-x64-musl@0.46.0": - version "0.46.0" - resolved "https://registry.yarnpkg.com/@oxfmt/binding-linux-x64-musl/-/binding-linux-x64-musl-0.46.0.tgz#dc399b372b27b26534d516a5a68927843f400603" - integrity sha512-7LsLY9Cw57GPkhSR+duI3mt9baRczK/DtHYSldQ4BEU92da9igBQNl4z7Vq5U9NNPsh1FmpKvv1q9WDtiUQR1A== +"@oxfmt/binding-linux-x64-musl@0.47.0": + version "0.47.0" + resolved "https://registry.yarnpkg.com/@oxfmt/binding-linux-x64-musl/-/binding-linux-x64-musl-0.47.0.tgz#e8ecc62fd533255298f316f64ed1d25553ea147d" + integrity sha512-DTsmGEaA2860Aq5VUyDO8/MT9NFxwVL93RnRYmpMwK6DsSkThmvEpqoUDDljziEpAedMRG19SCogrNbINSbLUQ== -"@oxfmt/binding-openharmony-arm64@0.46.0": - version "0.46.0" - resolved "https://registry.yarnpkg.com/@oxfmt/binding-openharmony-arm64/-/binding-openharmony-arm64-0.46.0.tgz#b7c89ecd6e1827dca86779f4d08dc200bad5915c" - integrity sha512-lHiBOz8Duaku7JtRNLlps3j++eOaICPZSd8FCVmTDM4DFOPT71Bjn7g6iar1z7StXlKRweUKxWUs4sA+zWGDXg== +"@oxfmt/binding-openharmony-arm64@0.47.0": + version "0.47.0" + resolved 
"https://registry.yarnpkg.com/@oxfmt/binding-openharmony-arm64/-/binding-openharmony-arm64-0.47.0.tgz#96ee2865d75c0fc8a65416ad8866218ba47c394d" + integrity sha512-8r5BDro7fLOBoq1JXHLVSs55OlrxQhEso4HVo0TcY7OXJUPYfjPoOaYL5us+yIwqyP9rQwN+rxuiNFSmaxSuOQ== -"@oxfmt/binding-win32-arm64-msvc@0.46.0": - version "0.46.0" - resolved "https://registry.yarnpkg.com/@oxfmt/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-0.46.0.tgz#ec0ea70875374bf0a1c949bac6a2177e23425bb8" - integrity sha512-/5ktYUliP89RhgC37DBH1x20U5zPSZMy3cMEcO0j3793rbHP9MWsknBwQB6eozRzWmYrh0IFM/p20EbPvDlYlg== +"@oxfmt/binding-win32-arm64-msvc@0.47.0": + version "0.47.0" + resolved "https://registry.yarnpkg.com/@oxfmt/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-0.47.0.tgz#ca4025455656510653c8491809efa83cb5c0cb70" + integrity sha512-qtz/gzm8IjSPUlseZ0ofW8zyHLoZsuP5HTfcGGkWkUblB89JT8GNYH3ICqjbDsqsGqXum0/ZndXTFplSdXFIcg== -"@oxfmt/binding-win32-ia32-msvc@0.46.0": - version "0.46.0" - resolved "https://registry.yarnpkg.com/@oxfmt/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-0.46.0.tgz#fa7b6bf31602c814ffdd8e3bfad355569b2f08da" - integrity sha512-3WTnoiuIr8XvV0DIY7SN+1uJSwKf4sPpcbHfobcRT9JutGcLaef/miyBB87jxd3aqH+mS0+G5lsgHuXLUwjjpQ== +"@oxfmt/binding-win32-ia32-msvc@0.47.0": + version "0.47.0" + resolved "https://registry.yarnpkg.com/@oxfmt/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-0.47.0.tgz#66a3379214661b6a738da74ea5fd09ce4d55390c" + integrity sha512-5vIcdcIDE7nCx+MXN6sm8kbC4zajDB31E86rez4i45iHNH/2NjdKlJ720xcHTr3eeiMcttCGPHPhE1TjtBDGZw== -"@oxfmt/binding-win32-x64-msvc@0.46.0": - version "0.46.0" - resolved "https://registry.yarnpkg.com/@oxfmt/binding-win32-x64-msvc/-/binding-win32-x64-msvc-0.46.0.tgz#0ddd94829568c94101abe88896e48f724891e4f2" - integrity sha512-IXxiQpkYnOwNfP23vzwSfhdpxJzyiPTY7eTn6dn3DsriKddESzM8i6kfq9R7CD/PUJwCvQT22NgtygBeug3KoA== +"@oxfmt/binding-win32-x64-msvc@0.47.0": + version "0.47.0" + resolved 
"https://registry.yarnpkg.com/@oxfmt/binding-win32-x64-msvc/-/binding-win32-x64-msvc-0.47.0.tgz#ad962adfc251270125f98b107a985d1365ce6b07" + integrity sha512-Sr59Y5ms54ONBjxFeWhVlGyQcHXxcl9DxC23f6yXlRkcos7LXBLoO+KDfxexjHIOZh7cWqrWduzvUjJ+pHp8cQ== -"@oxlint-tsgolint/darwin-arm64@0.21.1": - version "0.21.1" - resolved "https://registry.yarnpkg.com/@oxlint-tsgolint/darwin-arm64/-/darwin-arm64-0.21.1.tgz#b4390b047d246608b130e2bde238306659c2b292" - integrity sha512-7TLjyWe4wG9saJc992VWmaHq2hwKfOEEVTjheReXJXaDhavMZI4X9a6nKhbEng4IVkYtzjD2jw16vw2WFXLYLw== +"@oxlint-tsgolint/darwin-arm64@0.22.1": + version "0.22.1" + resolved "https://registry.yarnpkg.com/@oxlint-tsgolint/darwin-arm64/-/darwin-arm64-0.22.1.tgz#02bf38e8f91e276c01d93c590f0ba843d12ab000" + integrity sha512-4150Lpgc1YM09GcjA6GSrra1JoPjC7aOpfywLjWEY4vW0Sd1qKzqHF1WRaiw0/qUZ40OATYdv3aRd7ipPkWQbw== -"@oxlint-tsgolint/darwin-x64@0.21.1": - version "0.21.1" - resolved "https://registry.yarnpkg.com/@oxlint-tsgolint/darwin-x64/-/darwin-x64-0.21.1.tgz#18f607e47c04459962d2ddf1461491f5690c3f8f" - integrity sha512-7wf9Wf75nTzA7zpL9myhFe2RKvfuqGUOADNvUooCjEWvh7hmPz3lSEqTMh5Z/VQhzsG04mM9ACyghxhRzq7zFw== +"@oxlint-tsgolint/darwin-x64@0.22.1": + version "0.22.1" + resolved "https://registry.yarnpkg.com/@oxlint-tsgolint/darwin-x64/-/darwin-x64-0.22.1.tgz#fe5ef6548a6ee016fb4827b80f205d940c4f3f08" + integrity sha512-vFWcPWYOgZs4HWcgS1EjUZg33NLcNfEYU49KGImmCfZWkflENrmBYV4HN/C0YeAPum6ZZ/goPSvQrB/cOD+NfA== -"@oxlint-tsgolint/linux-arm64@0.21.1": - version "0.21.1" - resolved "https://registry.yarnpkg.com/@oxlint-tsgolint/linux-arm64/-/linux-arm64-0.21.1.tgz#da5932c940362faa261c67085c9eb215a8f48bab" - integrity sha512-IPuQN/Vd0Rjklg/cCGBbQyUuRBp2f6LQXpZYwk5ivOR6V/+CgiYsv8pn/PVY7gjeyoNvPQrXB7xMjHUO2YZbdw== +"@oxlint-tsgolint/linux-arm64@0.22.1": + version "0.22.1" + resolved "https://registry.yarnpkg.com/@oxlint-tsgolint/linux-arm64/-/linux-arm64-0.22.1.tgz#e8262db8794275bbbea9630cd9d6ebcbd190e38c" + integrity 
sha512-6LiUpP0Zir3+29FvBm7Y28q/dBjSHqTZ5MhG1Ckw4fGhI4cAvbcwXaKvbjx1TP7rRmBNOoq/M5xdpHjTb+GAew== -"@oxlint-tsgolint/linux-x64@0.21.1": - version "0.21.1" - resolved "https://registry.yarnpkg.com/@oxlint-tsgolint/linux-x64/-/linux-x64-0.21.1.tgz#26c916b25c852bb63c9bd95f61bd0228400ddef0" - integrity sha512-d1niGuTbh2qiv7dR7tqkbOcM5cIR63of0lMBFdEQavL1KrJV8zuRdwdi68K7MNGdgoR+J5A9ajpGGvsHwp1bPg== +"@oxlint-tsgolint/linux-x64@0.22.1": + version "0.22.1" + resolved "https://registry.yarnpkg.com/@oxlint-tsgolint/linux-x64/-/linux-x64-0.22.1.tgz#848b7a2062826511252a003cdcf01e6ab3ae3fd5" + integrity sha512-fuX1hEQfpHauUbXADsfqVhRzrUrGabzGXbj5wsp2vKhV5uk/Rze8Mba9GdjFGECzvXudMGqHqxB4r6jGRdhxVA== -"@oxlint-tsgolint/win32-arm64@0.21.1": - version "0.21.1" - resolved "https://registry.yarnpkg.com/@oxlint-tsgolint/win32-arm64/-/win32-arm64-0.21.1.tgz#c6e78be4394f29abd9106137c5f0a9e7ee4e0d97" - integrity sha512-ICu9y2JLnFPvFqstnWPPNqBM8LK8BWw2OTeaR0UgEMm4hOSbrZAKv1/hwZYyiLqnCNjBL87AGSQIgTHCYlsipw== +"@oxlint-tsgolint/win32-arm64@0.22.1": + version "0.22.1" + resolved "https://registry.yarnpkg.com/@oxlint-tsgolint/win32-arm64/-/win32-arm64-0.22.1.tgz#9e046b04251b6d9c40eddf985cd72a23a635c7bd" + integrity sha512-8SZidAj+jrbZf9ZjBEYW0tiNZ+KasqB2zgW26qdiPpQSF/DzURnPmXz651IeA9YsmbVdHGIooEHUmev6QJdquA== -"@oxlint-tsgolint/win32-x64@0.21.1": - version "0.21.1" - resolved "https://registry.yarnpkg.com/@oxlint-tsgolint/win32-x64/-/win32-x64-0.21.1.tgz#e0ff1eebe8837effe74af3f76c9ae6afc8992e58" - integrity sha512-cTEFCFjCj6iXfrSHcvajSPNqhEA4TxSzU3gFxbdGSAUTNXGToU99IbdhWAPSbhcucoym0XE4Zl7E41NiSkNTug== +"@oxlint-tsgolint/win32-x64@0.22.1": + version "0.22.1" + resolved "https://registry.yarnpkg.com/@oxlint-tsgolint/win32-x64/-/win32-x64-0.22.1.tgz#fb1999770b62dc42996593ee30a488df6c5b6c60" + integrity sha512-QweSk9H5lFh5Y+WUf2Kq/OAN88V6+62ZwGhP38gqdRotI90luXSMkruFTj7Q2rYrzH4ZVNaSqx7NY8JpSfIzqg== -"@oxlint/binding-android-arm-eabi@1.61.0": - version "1.61.0" - resolved 
"https://registry.yarnpkg.com/@oxlint/binding-android-arm-eabi/-/binding-android-arm-eabi-1.61.0.tgz#c7a8fb22ac3084d6f4c873cdc9779f4f9e38b805" - integrity sha512-6eZBPgiigK5txqoVgRqxbaxiom4lM8AP8CyKPPvpzKnQ3iFRFOIDc+0AapF+qsUSwjOzr5SGk4SxQDpQhkSJMQ== +"@oxlint/binding-android-arm-eabi@1.62.0": + version "1.62.0" + resolved "https://registry.yarnpkg.com/@oxlint/binding-android-arm-eabi/-/binding-android-arm-eabi-1.62.0.tgz#4b13c0a80392e45a466099d9bcd2861887658565" + integrity sha512-pKsthNECyvJh8lPTICz6VcwVy2jOqdhhsp1rlxCkhgZR47aKvXPmaRWQDv+zlXpRae4qm1MaaTnutkaOk5aofg== -"@oxlint/binding-android-arm64@1.61.0": - version "1.61.0" - resolved "https://registry.yarnpkg.com/@oxlint/binding-android-arm64/-/binding-android-arm64-1.61.0.tgz#5fe7c707e12513ede292da015d13e93781771e9f" - integrity sha512-CkwLR69MUnyv5wjzebvbbtTSUwqLxM35CXE79bHqDIK+NtKmPEUpStTcLQRZMCo4MP0qRT6TXIQVpK0ZVScnMA== +"@oxlint/binding-android-arm64@1.62.0": + version "1.62.0" + resolved "https://registry.yarnpkg.com/@oxlint/binding-android-arm64/-/binding-android-arm64-1.62.0.tgz#f79cbc4e8e10d0c03bea45f12371d92c1b1fb184" + integrity sha512-b1AUNViByvgmR2xJDubvLIr+dSuu3uraG7bsAoKo+xrpspPvu6RIn6Fhr2JUhobfep3jwUTy18Huco6GkwdvGQ== -"@oxlint/binding-darwin-arm64@1.61.0": - version "1.61.0" - resolved "https://registry.yarnpkg.com/@oxlint/binding-darwin-arm64/-/binding-darwin-arm64-1.61.0.tgz#1f58a4c592b76e293e5b9072cc0e4e7a5bef8191" - integrity sha512-8JbefTkbmvqkqWjmQrHke+MdpgT2UghhD/ktM4FOQSpGeCgbMToJEKdl9zwhr/YWTl92i4QI1KiTwVExpcUN8A== +"@oxlint/binding-darwin-arm64@1.62.0": + version "1.62.0" + resolved "https://registry.yarnpkg.com/@oxlint/binding-darwin-arm64/-/binding-darwin-arm64-1.62.0.tgz#cbb84d6903ca40221aa5447c919f5e831949df3d" + integrity sha512-iG+Tvf70UJ6otfwFYIHk36Sjq9cpPP5YLxkoggANNRtzgi3Tj3g8q6Ybqi6AtkU3+yg9QwF7bDCkCS6bbL4PCg== -"@oxlint/binding-darwin-x64@1.61.0": - version "1.61.0" - resolved 
"https://registry.yarnpkg.com/@oxlint/binding-darwin-x64/-/binding-darwin-x64-1.61.0.tgz#eade7425cc943c3146f3e0f0fb4d08d615ee78a9" - integrity sha512-uWpoxDT47hTnDLcdEh5jVbso8rlTTu5o0zuqa9J8E0JAKmIWn7kGFEIB03Pycn2hd2vKxybPGLhjURy/9We5FQ== +"@oxlint/binding-darwin-x64@1.62.0": + version "1.62.0" + resolved "https://registry.yarnpkg.com/@oxlint/binding-darwin-x64/-/binding-darwin-x64-1.62.0.tgz#3f06184754e8bffb96b7218257f3440f5e58f681" + integrity sha512-oOWI6YPPr5AJUx+yIDlxmuUbQjS5gZX3OH3QisawYvsZgLiQVvZtR0rPBcJTxLWqt2ClrWg0DlSrlUiG5SQNHg== -"@oxlint/binding-freebsd-x64@1.61.0": - version "1.61.0" - resolved "https://registry.yarnpkg.com/@oxlint/binding-freebsd-x64/-/binding-freebsd-x64-1.61.0.tgz#8d82382a07ce4895c91694a810dee7f39866951b" - integrity sha512-K/o4hEyW7flfMel0iBVznmMBt7VIMHGdjADocHKpK1DUF9erpWnJ+BSSWd2W0c8K3mPtpph+CuHzRU6CI3l9jQ== +"@oxlint/binding-freebsd-x64@1.62.0": + version "1.62.0" + resolved "https://registry.yarnpkg.com/@oxlint/binding-freebsd-x64/-/binding-freebsd-x64-1.62.0.tgz#240a17efea5f5c0edb006acc269751dcd99579ec" + integrity sha512-dLP33T7VLCmLVv4cvjkVX+rmkcwNk2UfxmsZPNur/7BQHoQR60zJ7XLiRvNUawlzn0u8ngCa3itjEG73MAMa/w== -"@oxlint/binding-linux-arm-gnueabihf@1.61.0": - version "1.61.0" - resolved "https://registry.yarnpkg.com/@oxlint/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-1.61.0.tgz#df76a5700651cda73ca5578de7308f9479b7cb1b" - integrity sha512-P6040ZkcyweJ0Po9yEFqJCdvZnf3VNCGs1SIHgXDf8AAQNC6ID/heXQs9iSgo2FH7gKaKq32VWc59XZwL34C5Q== +"@oxlint/binding-linux-arm-gnueabihf@1.62.0": + version "1.62.0" + resolved "https://registry.yarnpkg.com/@oxlint/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-1.62.0.tgz#7dda9ca87f3bdc3f575a1c26189e37d1a9e37970" + integrity sha512-fl//LWNks6qo9chNY60UDYyIwtp7a5cEx4Y/rHPjaarhuwqx6jtbzEpD5V5AqmdL4a6Y5D8zeXg5HF2Cr0QmSQ== -"@oxlint/binding-linux-arm-musleabihf@1.61.0": - version "1.61.0" - resolved 
"https://registry.yarnpkg.com/@oxlint/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-1.61.0.tgz#b198e5697cfb6cc55eff5ab5b9766732e143492d" - integrity sha512-bwxrGCzTZkuB+THv2TQ1aTkVEfv5oz8sl+0XZZCpoYzErJD8OhPQOTA0ENPd1zJz8QsVdSzSrS2umKtPq4/JXg== +"@oxlint/binding-linux-arm-musleabihf@1.62.0": + version "1.62.0" + resolved "https://registry.yarnpkg.com/@oxlint/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-1.62.0.tgz#a1e1e292ab90fd54f6af7dd15a5b067b1577c0e8" + integrity sha512-i5vkAuxvueTODV3J2dL61/TXewDHhMFKvtD156cIsk7GsdfiAu7zW7kY0NJXhKeFHeiMZIh7eFNjkPYH6J47HQ== -"@oxlint/binding-linux-arm64-gnu@1.61.0": - version "1.61.0" - resolved "https://registry.yarnpkg.com/@oxlint/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-1.61.0.tgz#a5cdae7ef9f9b79526ef764fb174bb362c3737ee" - integrity sha512-vkhb9/wKguMkLlrm3FoJW/Xmdv31GgYAE+x8lxxQ+7HeOxXUySI0q36a3NTVIuQUdLzxCI1zzMGsk1o37FOe3w== +"@oxlint/binding-linux-arm64-gnu@1.62.0": + version "1.62.0" + resolved "https://registry.yarnpkg.com/@oxlint/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-1.62.0.tgz#c98592a1d53be7dda9b8a58f3707fe7be2cdc027" + integrity sha512-QwN19LLuIGuOjEflSeJkZmOTfBdBMlTmW8xbMf8TZhjd//cxVNYQPq75q7oKZBJc6hRx3gY7sX0Egc8cEIFZYg== -"@oxlint/binding-linux-arm64-musl@1.61.0": - version "1.61.0" - resolved "https://registry.yarnpkg.com/@oxlint/binding-linux-arm64-musl/-/binding-linux-arm64-musl-1.61.0.tgz#227f6d94a3c00140a88ea338436783bd9da6f4ae" - integrity sha512-bl1dQh8LnVqsj6oOQAcxwbuOmNJkwc4p6o//HTBZhNTzJy21TLDwAviMqUFNUxDHkPGpmdKTSN4tWTjLryP8xg== +"@oxlint/binding-linux-arm64-musl@1.62.0": + version "1.62.0" + resolved "https://registry.yarnpkg.com/@oxlint/binding-linux-arm64-musl/-/binding-linux-arm64-musl-1.62.0.tgz#d9fbd4e8538b60c9b1dd87ffa5d132b6a5d3de56" + integrity sha512-8eCy3FCDuWUM5hWujAv6heMvfZPbcCOU3SdQUAkixZLu5bSzOkNfirJiLGoQFO943xceOKkiQRMQNzH++jM3WA== -"@oxlint/binding-linux-ppc64-gnu@1.61.0": - version "1.61.0" - resolved 
"https://registry.yarnpkg.com/@oxlint/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-1.61.0.tgz#e504e6cd8691fe460b202599d4c962fc7ad98f94" - integrity sha512-QoOX6KB2IiEpyOj/HKqaxi+NQHPnOgNgnr22n9N4ANJCzXkUlj1UmeAbFb4PpqdlHIzvGDM5xZ0OKtcLq9RhiQ== +"@oxlint/binding-linux-ppc64-gnu@1.62.0": + version "1.62.0" + resolved "https://registry.yarnpkg.com/@oxlint/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-1.62.0.tgz#b417ea16f901a2e16fce7523993533fa9ff30eac" + integrity sha512-NjQ7K7tpTPDe9J+yq8p/s/J0E7lRCkK2uDBDqvT4XIT6f4Z0tlnr59OBg/WcrmVHER1AbrcfyxhGTXgcG8ytWg== -"@oxlint/binding-linux-riscv64-gnu@1.61.0": - version "1.61.0" - resolved "https://registry.yarnpkg.com/@oxlint/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-1.61.0.tgz#efe70d9e570756fe19fb60d05a39dd58e9e0f32d" - integrity sha512-1TGcTerjY6p152wCof3oKElccq3xHljS/Mucp04gV/4ATpP6nO7YNnp7opEg6SHkv2a57/b4b8Ndm9znJ1/qAw== +"@oxlint/binding-linux-riscv64-gnu@1.62.0": + version "1.62.0" + resolved "https://registry.yarnpkg.com/@oxlint/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-1.62.0.tgz#a4e12361d1b72164f454ac6517302721b8dab45a" + integrity sha512-oKZed9gmSwze29dEt3/Wnsv6l/Ygw/FUst+8Kfpv2SGeS/glEoTGZAMQw37SVyzFV76UTHJN2snGgxK2t2+8ow== -"@oxlint/binding-linux-riscv64-musl@1.61.0": - version "1.61.0" - resolved "https://registry.yarnpkg.com/@oxlint/binding-linux-riscv64-musl/-/binding-linux-riscv64-musl-1.61.0.tgz#f9c42095192f235b03a5d3d1f1fbfb45002d5a93" - integrity sha512-65wXEmZIrX2ADwC8i/qFL4EWLSbeuBpAm3suuX1vu4IQkKd+wLT/HU/BOl84kp91u2SxPkPDyQgu4yrqp8vwVA== +"@oxlint/binding-linux-riscv64-musl@1.62.0": + version "1.62.0" + resolved "https://registry.yarnpkg.com/@oxlint/binding-linux-riscv64-musl/-/binding-linux-riscv64-musl-1.62.0.tgz#f3a9e825d95360c03048d1d9117a1c2c1f1f547b" + integrity sha512-gBjBxQ+9lGpAYq+ELqw0w8QXsBnkZclFc7GRX2r0LnEVn3ZTEqeIKpKcGjucmp76Q53bvJD0i4qBWBhcfhSfGA== -"@oxlint/binding-linux-s390x-gnu@1.61.0": - version "1.61.0" - resolved 
"https://registry.yarnpkg.com/@oxlint/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-1.61.0.tgz#23773cd6f8087f98c54383caccb98808fab6cca0" - integrity sha512-TVvhgMvor7Qa6COeXxCJ7ENOM+lcAOGsQ0iUdPSCv2hxb9qSHLQ4XF1h50S6RE1gBOJ0WV3rNukg4JJJP1LWRA== +"@oxlint/binding-linux-s390x-gnu@1.62.0": + version "1.62.0" + resolved "https://registry.yarnpkg.com/@oxlint/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-1.62.0.tgz#39c4b7769f7847b5d806fd7ed9b67dfbd05dc75c" + integrity sha512-Ew2Kxs9EQ9/mbAIJ2hvocMC0wsOu6YKzStI2eFBDt+Td5O8seVC/oxgRIHqCcl5sf5ratA1nozQBAuv7tphkHg== -"@oxlint/binding-linux-x64-gnu@1.61.0": - version "1.61.0" - resolved "https://registry.yarnpkg.com/@oxlint/binding-linux-x64-gnu/-/binding-linux-x64-gnu-1.61.0.tgz#f46b036cf74b382c03f6a00e18cd1ae53096517f" - integrity sha512-SjpS5uYuFoDnDdZPwZE59ndF95AsY47R5MliuneTWR1pDm2CxGJaYXbKULI71t5TVfLQUWmrHEGRL9xvuq6dnA== +"@oxlint/binding-linux-x64-gnu@1.62.0": + version "1.62.0" + resolved "https://registry.yarnpkg.com/@oxlint/binding-linux-x64-gnu/-/binding-linux-x64-gnu-1.62.0.tgz#73883d8f4096fc7560a8a82d64c3dc8c8d94c392" + integrity sha512-5z25jcAA0gfKyVwz71A0VXgaPlocPoTAxhlv/hgoK6tlCrfoNuw7haWbDHvGMfjXhdic4EqVXGRv5XsTqFnbRQ== -"@oxlint/binding-linux-x64-musl@1.61.0": - version "1.61.0" - resolved "https://registry.yarnpkg.com/@oxlint/binding-linux-x64-musl/-/binding-linux-x64-musl-1.61.0.tgz#cff6c4c24de0863cecbce1c1f43adcafacd9011d" - integrity sha512-gGfAeGD4sNJGILZbc/yKcIimO9wQnPMoYp9swAaKeEtwsSQAbU+rsdQze5SBtIP6j0QDzeYd4XSSUCRCF+LIeQ== +"@oxlint/binding-linux-x64-musl@1.62.0": + version "1.62.0" + resolved "https://registry.yarnpkg.com/@oxlint/binding-linux-x64-musl/-/binding-linux-x64-musl-1.62.0.tgz#8ea90209143c1d565384efc486d6e26c5b0d2b29" + integrity sha512-IWpHmMB6ZDllPvqWDkG6AmXrN7JF5e/c4g/0PuURsmlK+vHoYZPB70rr4u1bn3I4LsKCSpqqfveyx6UCOC8wdg== -"@oxlint/binding-openharmony-arm64@1.61.0": - version "1.61.0" - resolved 
"https://registry.yarnpkg.com/@oxlint/binding-openharmony-arm64/-/binding-openharmony-arm64-1.61.0.tgz#9fdd87fc10c2c51c510470f7c4136ab2df9ee7b3" - integrity sha512-OlVT0LrG/ct33EVtWRyR+B/othwmDWeRxfi13wUdPeb3lAT5TgTcFDcfLfarZtzB4W1nWF/zICMgYdkggX2WmQ== +"@oxlint/binding-openharmony-arm64@1.62.0": + version "1.62.0" + resolved "https://registry.yarnpkg.com/@oxlint/binding-openharmony-arm64/-/binding-openharmony-arm64-1.62.0.tgz#bdbea8d2132eea6887696bc64d2579b20909d29a" + integrity sha512-fjlSxxrD5pA594vkyikCS9MnPRjQawW6/BLgyTYkO+73wwPlYjkcZ7LSd974l0Q2zkHQmu4DPvJFLYA7o8xrxQ== -"@oxlint/binding-win32-arm64-msvc@1.61.0": - version "1.61.0" - resolved "https://registry.yarnpkg.com/@oxlint/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-1.61.0.tgz#25a87f62859a6703f2520d665f98f6c0f88a70e0" - integrity sha512-vI//NZPJk6DToiovPtaiwD4iQ7kO1r5ReWQD0sOOyKRtP3E2f6jxin4uvwi3OvDzHA2EFfd7DcZl5dtkQh7g1w== +"@oxlint/binding-win32-arm64-msvc@1.62.0": + version "1.62.0" + resolved "https://registry.yarnpkg.com/@oxlint/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-1.62.0.tgz#fc219c7a1c6e4dfd59194837f0f513622f60962d" + integrity sha512-EiFXr8loNS0Ul3Gu80+9nr1T8jRmnKocqmHHg16tj5ZqTgUXyb97l2rrspVHdDluyFn9JfR4PoJFdNzw4paHww== -"@oxlint/binding-win32-ia32-msvc@1.61.0": - version "1.61.0" - resolved "https://registry.yarnpkg.com/@oxlint/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-1.61.0.tgz#06f7df42b074f28ef27540d38be69875e5d5afcb" - integrity sha512-0ySj4/4zd2XjePs3XAQq7IigIstN4LPQZgCyigX5/ERMLjdWAJfnxcTsrtxZxuij8guJW8foXuHmhGxW0H4dDA== +"@oxlint/binding-win32-ia32-msvc@1.62.0": + version "1.62.0" + resolved "https://registry.yarnpkg.com/@oxlint/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-1.62.0.tgz#b735294b07c0e2803fcb7a09426e135d1030ed3d" + integrity sha512-IgOFvL73li1bFgab+hThXYA0N2Xms2kV2MvZN95cebV+fmrZ9AVui1JSxfeeqRLo3CpPxKZlzhyq4G0cnaAvIw== -"@oxlint/binding-win32-x64-msvc@1.61.0": - version "1.61.0" - resolved 
"https://registry.yarnpkg.com/@oxlint/binding-win32-x64-msvc/-/binding-win32-x64-msvc-1.61.0.tgz#7bc84b261bb8b515ade20f1044547cc8bbee44ce" - integrity sha512-0xgSiyeqDLDZxXoe9CVJrOx3TUVsfyoOY7cNi03JbItNcC9WCZqrSNdrAbHONxhSPaVh/lzfnDcON1RqSUMhHw== +"@oxlint/binding-win32-x64-msvc@1.62.0": + version "1.62.0" + resolved "https://registry.yarnpkg.com/@oxlint/binding-win32-x64-msvc/-/binding-win32-x64-msvc-1.62.0.tgz#872115161dcac8d91b45e624b3a2dd01ad24747a" + integrity sha512-6hMpyDWQ2zGA1OXFKBrdYMUveUCO8UJhkO6JdwZPd78xIdHZNhjx+pib+4fC2Cljuhjyl0QwA2F3df/bs4Bp6A== "@parcel/watcher-android-arm64@2.5.1": version "2.5.1" @@ -15031,69 +15031,69 @@ own-keys@^1.0.1: object-keys "^1.1.1" safe-push-apply "^1.0.0" -oxfmt@0.46.0: - version "0.46.0" - resolved "https://registry.yarnpkg.com/oxfmt/-/oxfmt-0.46.0.tgz#33789146055129820102203b4397eb4932ada687" - integrity sha512-CopwJOwPAjZ9p76fCvz+mSOJTw9/NY3cSksZK3VO/bUQ8UoEcketNgUuYS0UB3p+R9XnXe7wGGXUmyFxc7QxJA== +oxfmt@0.47.0: + version "0.47.0" + resolved "https://registry.yarnpkg.com/oxfmt/-/oxfmt-0.47.0.tgz#34faf46a5f7c22138ac9d566ac28be7c52e8ca98" + integrity sha512-OFbkbzxKCpooQEnRmpTDnuwTX8KHXzZTQ4Df/hz85fpS67Pl+lxPEFvUtin56HIIS0B1k4X8oIzTXRZPufA2CA== dependencies: tinypool "2.1.0" optionalDependencies: - "@oxfmt/binding-android-arm-eabi" "0.46.0" - "@oxfmt/binding-android-arm64" "0.46.0" - "@oxfmt/binding-darwin-arm64" "0.46.0" - "@oxfmt/binding-darwin-x64" "0.46.0" - "@oxfmt/binding-freebsd-x64" "0.46.0" - "@oxfmt/binding-linux-arm-gnueabihf" "0.46.0" - "@oxfmt/binding-linux-arm-musleabihf" "0.46.0" - "@oxfmt/binding-linux-arm64-gnu" "0.46.0" - "@oxfmt/binding-linux-arm64-musl" "0.46.0" - "@oxfmt/binding-linux-ppc64-gnu" "0.46.0" - "@oxfmt/binding-linux-riscv64-gnu" "0.46.0" - "@oxfmt/binding-linux-riscv64-musl" "0.46.0" - "@oxfmt/binding-linux-s390x-gnu" "0.46.0" - "@oxfmt/binding-linux-x64-gnu" "0.46.0" - "@oxfmt/binding-linux-x64-musl" "0.46.0" - "@oxfmt/binding-openharmony-arm64" "0.46.0" - 
"@oxfmt/binding-win32-arm64-msvc" "0.46.0" - "@oxfmt/binding-win32-ia32-msvc" "0.46.0" - "@oxfmt/binding-win32-x64-msvc" "0.46.0" + "@oxfmt/binding-android-arm-eabi" "0.47.0" + "@oxfmt/binding-android-arm64" "0.47.0" + "@oxfmt/binding-darwin-arm64" "0.47.0" + "@oxfmt/binding-darwin-x64" "0.47.0" + "@oxfmt/binding-freebsd-x64" "0.47.0" + "@oxfmt/binding-linux-arm-gnueabihf" "0.47.0" + "@oxfmt/binding-linux-arm-musleabihf" "0.47.0" + "@oxfmt/binding-linux-arm64-gnu" "0.47.0" + "@oxfmt/binding-linux-arm64-musl" "0.47.0" + "@oxfmt/binding-linux-ppc64-gnu" "0.47.0" + "@oxfmt/binding-linux-riscv64-gnu" "0.47.0" + "@oxfmt/binding-linux-riscv64-musl" "0.47.0" + "@oxfmt/binding-linux-s390x-gnu" "0.47.0" + "@oxfmt/binding-linux-x64-gnu" "0.47.0" + "@oxfmt/binding-linux-x64-musl" "0.47.0" + "@oxfmt/binding-openharmony-arm64" "0.47.0" + "@oxfmt/binding-win32-arm64-msvc" "0.47.0" + "@oxfmt/binding-win32-ia32-msvc" "0.47.0" + "@oxfmt/binding-win32-x64-msvc" "0.47.0" -oxlint-tsgolint@0.21.1: - version "0.21.1" - resolved "https://registry.yarnpkg.com/oxlint-tsgolint/-/oxlint-tsgolint-0.21.1.tgz#92e87d455283b346087fbff6d0481da9b5276640" - integrity sha512-O2hxiT14C2HJkwzBU6CQBFPoagSd/IcV+Tt3e3UUaXFwbW4BO5DSDPSSboc3UM5MIDY+MLyepvtQwBQafNxWdw== +oxlint-tsgolint@0.22.1: + version "0.22.1" + resolved "https://registry.yarnpkg.com/oxlint-tsgolint/-/oxlint-tsgolint-0.22.1.tgz#740bd4855c022611929732e7f996179e21ea153b" + integrity sha512-YUSGSLUnoolsu8gxISEDio3q1rtsCozwfOzASUn3DT2mR2EeQ93uEEnen7s+6LpF+lyTQFln1pQfqwBh/fsVEg== optionalDependencies: - "@oxlint-tsgolint/darwin-arm64" "0.21.1" - "@oxlint-tsgolint/darwin-x64" "0.21.1" - "@oxlint-tsgolint/linux-arm64" "0.21.1" - "@oxlint-tsgolint/linux-x64" "0.21.1" - "@oxlint-tsgolint/win32-arm64" "0.21.1" - "@oxlint-tsgolint/win32-x64" "0.21.1" + "@oxlint-tsgolint/darwin-arm64" "0.22.1" + "@oxlint-tsgolint/darwin-x64" "0.22.1" + "@oxlint-tsgolint/linux-arm64" "0.22.1" + "@oxlint-tsgolint/linux-x64" "0.22.1" + "@oxlint-tsgolint/win32-arm64" 
"0.22.1" + "@oxlint-tsgolint/win32-x64" "0.22.1" -oxlint@1.61.0: - version "1.61.0" - resolved "https://registry.yarnpkg.com/oxlint/-/oxlint-1.61.0.tgz#e714742cfe7b815713feb14600b0ffe963d539a4" - integrity sha512-ZC0ALuhDZ6ivOFG+sy0D0pEDN49EvsId98zVlmYdkcXHsEM14m/qTNUEsUpiFiCVbpIxYtVBmmLE87nsbUHohQ== +oxlint@1.62.0: + version "1.62.0" + resolved "https://registry.yarnpkg.com/oxlint/-/oxlint-1.62.0.tgz#952d1ad5f05688c860b1fa3d168f31913fb51bc3" + integrity sha512-1uFkg6HakjsGIpW9wNdeW4/2LOHW9MEkoWjZUTUfQtIHyLIZPYt00w3Sg+H3lH+206FgBPHBbW5dVE5l2ExECQ== optionalDependencies: - "@oxlint/binding-android-arm-eabi" "1.61.0" - "@oxlint/binding-android-arm64" "1.61.0" - "@oxlint/binding-darwin-arm64" "1.61.0" - "@oxlint/binding-darwin-x64" "1.61.0" - "@oxlint/binding-freebsd-x64" "1.61.0" - "@oxlint/binding-linux-arm-gnueabihf" "1.61.0" - "@oxlint/binding-linux-arm-musleabihf" "1.61.0" - "@oxlint/binding-linux-arm64-gnu" "1.61.0" - "@oxlint/binding-linux-arm64-musl" "1.61.0" - "@oxlint/binding-linux-ppc64-gnu" "1.61.0" - "@oxlint/binding-linux-riscv64-gnu" "1.61.0" - "@oxlint/binding-linux-riscv64-musl" "1.61.0" - "@oxlint/binding-linux-s390x-gnu" "1.61.0" - "@oxlint/binding-linux-x64-gnu" "1.61.0" - "@oxlint/binding-linux-x64-musl" "1.61.0" - "@oxlint/binding-openharmony-arm64" "1.61.0" - "@oxlint/binding-win32-arm64-msvc" "1.61.0" - "@oxlint/binding-win32-ia32-msvc" "1.61.0" - "@oxlint/binding-win32-x64-msvc" "1.61.0" + "@oxlint/binding-android-arm-eabi" "1.62.0" + "@oxlint/binding-android-arm64" "1.62.0" + "@oxlint/binding-darwin-arm64" "1.62.0" + "@oxlint/binding-darwin-x64" "1.62.0" + "@oxlint/binding-freebsd-x64" "1.62.0" + "@oxlint/binding-linux-arm-gnueabihf" "1.62.0" + "@oxlint/binding-linux-arm-musleabihf" "1.62.0" + "@oxlint/binding-linux-arm64-gnu" "1.62.0" + "@oxlint/binding-linux-arm64-musl" "1.62.0" + "@oxlint/binding-linux-ppc64-gnu" "1.62.0" + "@oxlint/binding-linux-riscv64-gnu" "1.62.0" + "@oxlint/binding-linux-riscv64-musl" "1.62.0" + 
"@oxlint/binding-linux-s390x-gnu" "1.62.0" + "@oxlint/binding-linux-x64-gnu" "1.62.0" + "@oxlint/binding-linux-x64-musl" "1.62.0" + "@oxlint/binding-openharmony-arm64" "1.62.0" + "@oxlint/binding-win32-arm64-msvc" "1.62.0" + "@oxlint/binding-win32-ia32-msvc" "1.62.0" + "@oxlint/binding-win32-x64-msvc" "1.62.0" p-cancelable@^2.0.0: version "2.1.1" From 941016f12c61bac51ab076d0ea16c6b4262df449 Mon Sep 17 00:00:00 2001 From: Abhi kumar Date: Tue, 28 Apr 2026 12:22:51 +0530 Subject: [PATCH 04/19] Feat/crosshair series highlight (#11013) * chore: added changes for crosshair sync for tooltip * chore: minor cleanup * chore: updated the core structure * chore: updated the types * chore: minor cleanup * feat: added changes for sereis highlighting on crosshair sync * chore: pr review fixes * chore: handled other cases of groupby * chore: minor changes * feat: tooltip sync across panels (#11114) * feat: added changes for syncing tooltip * fix: fixed other tooltips closing when clicked on top of root tooltip * fix: highlighting first series * chore: removed y-axis sync for tooltip mode * chore: minor fix * chore: fmt fix --- .../charts/ChartWrapper/ChartWrapper.tsx | 4 +- .../visualization/charts/types.ts | 2 + .../panels/BarPanel/BarPanel.tsx | 5 + .../visualization/panels/BarPanel/utils.ts | 1 + .../TimeSeriesPanel/TimeSeriesPanel.tsx | 5 + .../panels/TimeSeriesPanel/utils.ts | 1 + .../components/Tooltip/BarChartTooltip.tsx | 2 + .../components/Tooltip/HistogramTooltip.tsx | 2 + .../components/Tooltip/TimeSeriesTooltip.tsx | 2 + .../lib/uPlotV2/components/Tooltip/utils.ts | 27 ++- frontend/src/lib/uPlotV2/components/types.ts | 3 + .../lib/uPlotV2/config/UPlotSeriesBuilder.ts | 11 +- frontend/src/lib/uPlotV2/config/types.ts | 5 + .../plugins/TooltipPlugin/TooltipPlugin.tsx | 55 +++--- .../TooltipPlugin/syncCursorRegistry.ts | 17 +- .../plugins/TooltipPlugin/syncDisplayHook.ts | 185 ++++++++++++++++++ .../TooltipPlugin/tooltipController.ts | 3 +- 
.../uPlotV2/plugins/TooltipPlugin/types.ts | 7 + .../plugins/__tests__/TooltipPlugin.test.ts | 77 ++++---- 19 files changed, 345 insertions(+), 69 deletions(-) create mode 100644 frontend/src/lib/uPlotV2/plugins/TooltipPlugin/syncDisplayHook.ts diff --git a/frontend/src/container/DashboardContainer/visualization/charts/ChartWrapper/ChartWrapper.tsx b/frontend/src/container/DashboardContainer/visualization/charts/ChartWrapper/ChartWrapper.tsx index 905dbb3e50..94c9350a0b 100644 --- a/frontend/src/container/DashboardContainer/visualization/charts/ChartWrapper/ChartWrapper.tsx +++ b/frontend/src/container/DashboardContainer/visualization/charts/ChartWrapper/ChartWrapper.tsx @@ -33,6 +33,7 @@ export default function ChartWrapper({ children, layoutChildren, yAxisUnit, + groupBy, customTooltip, pinnedTooltipElement, 'data-testid': testId, @@ -68,8 +69,9 @@ export default function ChartWrapper({ const syncMetadata = useMemo( () => ({ yAxisUnit, + groupBy, }), - [yAxisUnit], + [yAxisUnit, groupBy], ); return ( diff --git a/frontend/src/container/DashboardContainer/visualization/charts/types.ts b/frontend/src/container/DashboardContainer/visualization/charts/types.ts index 8d6587d110..e37cac7d62 100644 --- a/frontend/src/container/DashboardContainer/visualization/charts/types.ts +++ b/frontend/src/container/DashboardContainer/visualization/charts/types.ts @@ -6,6 +6,7 @@ import { DashboardCursorSync, TooltipClickData, } from 'lib/uPlotV2/plugins/TooltipPlugin/types'; +import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse'; interface BaseChartProps { width: number; @@ -38,6 +39,7 @@ interface UPlotBasedChartProps { interface UPlotChartDataProps { yAxisUnit?: string; decimalPrecision?: PrecisionOption; + groupBy?: BaseAutocompleteData[]; } export interface TimeSeriesChartProps diff --git a/frontend/src/container/DashboardContainer/visualization/panels/BarPanel/BarPanel.tsx 
b/frontend/src/container/DashboardContainer/visualization/panels/BarPanel/BarPanel.tsx index 41b2fc54c5..6ef8f8c795 100644 --- a/frontend/src/container/DashboardContainer/visualization/panels/BarPanel/BarPanel.tsx +++ b/frontend/src/container/DashboardContainer/visualization/panels/BarPanel/BarPanel.tsx @@ -113,6 +113,10 @@ function BarPanel(props: PanelWrapperProps): JSX.Element { uPlotRef.current = plot; }, []); + const groupBy = useMemo(() => { + return widget.query.builder.queryData[0].groupBy; + }, [widget.query]); + return (
{containerDimensions.width > 0 && containerDimensions.height > 0 && ( @@ -128,6 +132,7 @@ function BarPanel(props: PanelWrapperProps): JSX.Element { width={containerDimensions.width} height={containerDimensions.height} layoutChildren={layoutChildren} + groupBy={groupBy} isStackedBarChart={widget.stackedBarChart ?? false} yAxisUnit={widget.yAxisUnit} decimalPrecision={widget.decimalPrecision} diff --git a/frontend/src/container/DashboardContainer/visualization/panels/BarPanel/utils.ts b/frontend/src/container/DashboardContainer/visualization/panels/BarPanel/utils.ts index ef4eb518b6..c28811f9ee 100644 --- a/frontend/src/container/DashboardContainer/visualization/panels/BarPanel/utils.ts +++ b/frontend/src/container/DashboardContainer/visualization/panels/BarPanel/utils.ts @@ -105,6 +105,7 @@ export function prepareBarPanelConfig({ colorMapping: widget.customLegendColors ?? {}, isDarkMode, stepInterval: currentStepInterval, + metric: series.metric, }); }); diff --git a/frontend/src/container/DashboardContainer/visualization/panels/TimeSeriesPanel/TimeSeriesPanel.tsx b/frontend/src/container/DashboardContainer/visualization/panels/TimeSeriesPanel/TimeSeriesPanel.tsx index de1454e17d..263ce0baa3 100644 --- a/frontend/src/container/DashboardContainer/visualization/panels/TimeSeriesPanel/TimeSeriesPanel.tsx +++ b/frontend/src/container/DashboardContainer/visualization/panels/TimeSeriesPanel/TimeSeriesPanel.tsx @@ -104,6 +104,10 @@ function TimeSeriesPanel(props: PanelWrapperProps): JSX.Element { widget.decimalPrecision, ]); + const groupBy = useMemo(() => { + return widget.query.builder.queryData[0].groupBy; + }, [widget.query]); + return (
{containerDimensions.width > 0 && containerDimensions.height > 0 && ( @@ -117,6 +121,7 @@ function TimeSeriesPanel(props: PanelWrapperProps): JSX.Element { yAxisUnit={widget.yAxisUnit} decimalPrecision={widget.decimalPrecision} data={chartData as uPlot.AlignedData} + groupBy={groupBy} width={containerDimensions.width} height={containerDimensions.height} layoutChildren={layoutChildren} diff --git a/frontend/src/container/DashboardContainer/visualization/panels/TimeSeriesPanel/utils.ts b/frontend/src/container/DashboardContainer/visualization/panels/TimeSeriesPanel/utils.ts index 8bd0fe1e39..aa0816eeed 100644 --- a/frontend/src/container/DashboardContainer/visualization/panels/TimeSeriesPanel/utils.ts +++ b/frontend/src/container/DashboardContainer/visualization/panels/TimeSeriesPanel/utils.ts @@ -131,6 +131,7 @@ export const prepareUPlotConfig = ({ pointSize: 5, fillMode: widget.fillMode || FillMode.None, isDarkMode, + metric: series.metric, }); }); diff --git a/frontend/src/lib/uPlotV2/components/Tooltip/BarChartTooltip.tsx b/frontend/src/lib/uPlotV2/components/Tooltip/BarChartTooltip.tsx index a4bd3bdeb5..f883281051 100644 --- a/frontend/src/lib/uPlotV2/components/Tooltip/BarChartTooltip.tsx +++ b/frontend/src/lib/uPlotV2/components/Tooltip/BarChartTooltip.tsx @@ -16,6 +16,7 @@ export default function BarChartTooltip(props: BarTooltipProps): JSX.Element { yAxisUnit: props.yAxisUnit ?? 
'', decimalPrecision: props.decimalPrecision, isStackedBarChart: props.isStackedBarChart, + syncedSeriesIndexes: props.syncedSeriesIndexes, }), [ props.uPlotInstance, @@ -24,6 +25,7 @@ export default function BarChartTooltip(props: BarTooltipProps): JSX.Element { props.yAxisUnit, props.decimalPrecision, props.isStackedBarChart, + props.syncedSeriesIndexes, ], ); diff --git a/frontend/src/lib/uPlotV2/components/Tooltip/HistogramTooltip.tsx b/frontend/src/lib/uPlotV2/components/Tooltip/HistogramTooltip.tsx index 10f6e6739c..7b0d706df8 100644 --- a/frontend/src/lib/uPlotV2/components/Tooltip/HistogramTooltip.tsx +++ b/frontend/src/lib/uPlotV2/components/Tooltip/HistogramTooltip.tsx @@ -17,6 +17,7 @@ export default function HistogramTooltip( uPlotInstance: props.uPlotInstance, yAxisUnit: props.yAxisUnit ?? '', decimalPrecision: props.decimalPrecision, + syncedSeriesIndexes: props.syncedSeriesIndexes, }), [ props.uPlotInstance, @@ -24,6 +25,7 @@ export default function HistogramTooltip( props.dataIndexes, props.yAxisUnit, props.decimalPrecision, + props.syncedSeriesIndexes, ], ); diff --git a/frontend/src/lib/uPlotV2/components/Tooltip/TimeSeriesTooltip.tsx b/frontend/src/lib/uPlotV2/components/Tooltip/TimeSeriesTooltip.tsx index 9b80c1c116..d5a404da2f 100644 --- a/frontend/src/lib/uPlotV2/components/Tooltip/TimeSeriesTooltip.tsx +++ b/frontend/src/lib/uPlotV2/components/Tooltip/TimeSeriesTooltip.tsx @@ -17,6 +17,7 @@ export default function TimeSeriesTooltip( uPlotInstance: props.uPlotInstance, yAxisUnit: props.yAxisUnit ?? 
'', decimalPrecision: props.decimalPrecision, + syncedSeriesIndexes: props.syncedSeriesIndexes, }), [ props.uPlotInstance, @@ -24,6 +25,7 @@ export default function TimeSeriesTooltip( props.dataIndexes, props.yAxisUnit, props.decimalPrecision, + props.syncedSeriesIndexes, ], ); diff --git a/frontend/src/lib/uPlotV2/components/Tooltip/utils.ts b/frontend/src/lib/uPlotV2/components/Tooltip/utils.ts index 43eec1c5d1..0c8f66477d 100644 --- a/frontend/src/lib/uPlotV2/components/Tooltip/utils.ts +++ b/frontend/src/lib/uPlotV2/components/Tooltip/utils.ts @@ -62,6 +62,7 @@ export function buildTooltipContent({ yAxisUnit, decimalPrecision, isStackedBarChart, + syncedSeriesIndexes, }: { data: AlignedData; series: Series[]; @@ -71,18 +72,34 @@ export function buildTooltipContent({ yAxisUnit: string; decimalPrecision?: PrecisionOption; isStackedBarChart?: boolean; + syncedSeriesIndexes?: number[] | null; }): TooltipContentItem[] { const items: TooltipContentItem[] = []; + const allowedIndexes = + syncedSeriesIndexes != null ? new Set(syncedSeriesIndexes) : null; for (let seriesIndex = 1; seriesIndex < series.length; seriesIndex += 1) { const seriesItem = series[seriesIndex]; if (!seriesItem?.show) { continue; } + if (allowedIndexes != null && !allowedIndexes.has(seriesIndex)) { + continue; + } const dataIndex = dataIndexes[seriesIndex]; - // Skip series with no data at the current cursor position + const isSync = allowedIndexes != null; + if (dataIndex === null) { + if (isSync) { + items.push({ + label: String(seriesItem.label ?? ''), + value: 0, + tooltipValue: 'No Data', + color: resolveSeriesColor(seriesItem.stroke, uPlotInstance, seriesIndex), + isActive: false, + }); + } continue; } @@ -102,6 +119,14 @@ export function buildTooltipContent({ color: resolveSeriesColor(seriesItem.stroke, uPlotInstance, seriesIndex), isActive: seriesIndex === activeSeriesIndex, }); + } else if (isSync) { + items.push({ + label: String(seriesItem.label ?? 
''), + value: 0, + tooltipValue: 'No Data', + color: resolveSeriesColor(seriesItem.stroke, uPlotInstance, seriesIndex), + isActive: false, + }); } } diff --git a/frontend/src/lib/uPlotV2/components/types.ts b/frontend/src/lib/uPlotV2/components/types.ts index 228f914fcb..b6afb3499a 100644 --- a/frontend/src/lib/uPlotV2/components/types.ts +++ b/frontend/src/lib/uPlotV2/components/types.ts @@ -58,6 +58,9 @@ export interface TooltipRenderArgs { isPinned: boolean; dismiss: () => void; viaSync: boolean; + /** In Tooltip sync mode, limits which series are rendered in the receiver tooltip. + * null = no filtering; [] = no matches (tooltip hidden upstream); [...] = allowed indexes */ + syncedSeriesIndexes?: number[] | null; } export interface BaseTooltipProps { diff --git a/frontend/src/lib/uPlotV2/config/UPlotSeriesBuilder.ts b/frontend/src/lib/uPlotV2/config/UPlotSeriesBuilder.ts index 7fa716b9b0..122dc44b7f 100644 --- a/frontend/src/lib/uPlotV2/config/UPlotSeriesBuilder.ts +++ b/frontend/src/lib/uPlotV2/config/UPlotSeriesBuilder.ts @@ -9,6 +9,7 @@ import { BarAlignment, ConfigBuilder, DrawStyle, + ExtendedSeries, FillMode, LineInterpolation, LineStyle, @@ -27,7 +28,10 @@ let builders: PathBuilders | null = null; const DEFAULT_LINE_WIDTH = 2; export const POINT_SIZE_FACTOR = 2.5; -export class UPlotSeriesBuilder extends ConfigBuilder { +export class UPlotSeriesBuilder extends ConfigBuilder< + SeriesProps, + ExtendedSeries +> { constructor(props: SeriesProps) { super(props); const pathBuilders = uPlot.paths; @@ -205,8 +209,8 @@ export class UPlotSeriesBuilder extends ConfigBuilder { ); } - getConfig(): Series { - const { scaleKey, label, spanGaps, show = true } = this.props; + getConfig(): ExtendedSeries { + const { scaleKey, label, spanGaps, show = true, metric } = this.props; const resolvedLineColor = this.getLineColor(); @@ -233,6 +237,7 @@ export class UPlotSeriesBuilder extends ConfigBuilder { ...lineConfig, ...pathConfig, points: Object.keys(pointsConfig).length > 
0 ? pointsConfig : undefined, + metric, }; } } diff --git a/frontend/src/lib/uPlotV2/config/types.ts b/frontend/src/lib/uPlotV2/config/types.ts index 808260ff60..1d765bbb91 100644 --- a/frontend/src/lib/uPlotV2/config/types.ts +++ b/frontend/src/lib/uPlotV2/config/types.ts @@ -171,6 +171,10 @@ export enum FillMode { None = 'none', } +export type ExtendedSeries = Series & { + metric?: { [key: string]: string }; +}; + export interface SeriesProps extends LineConfig, PointsConfig, BarConfig { scaleKey: string; label?: string; @@ -194,6 +198,7 @@ export interface SeriesProps extends LineConfig, PointsConfig, BarConfig { fillMode?: FillMode; isDarkMode?: boolean; stepInterval?: number; + metric?: { [key: string]: string }; } export interface LegendItem { diff --git a/frontend/src/lib/uPlotV2/plugins/TooltipPlugin/TooltipPlugin.tsx b/frontend/src/lib/uPlotV2/plugins/TooltipPlugin/TooltipPlugin.tsx index ed3326eb9c..4b3cd0e7c6 100644 --- a/frontend/src/lib/uPlotV2/plugins/TooltipPlugin/TooltipPlugin.tsx +++ b/frontend/src/lib/uPlotV2/plugins/TooltipPlugin/TooltipPlugin.tsx @@ -4,7 +4,7 @@ import cx from 'classnames'; import uPlot from 'uplot'; import logEvent from 'api/common/logEvent'; -import { syncCursorRegistry } from './syncCursorRegistry'; +import { createSyncDisplayHook } from './syncDisplayHook'; import { createInitialControllerState, createSetCursorHandler, @@ -107,32 +107,20 @@ export default function TooltipPlugin({ // Enable uPlot's built-in cursor sync when requested so that // crosshair / tooltip can follow the dashboard-wide cursor. + let removeSyncDisplayHook: (() => void) | null = null; if (syncMode !== DashboardCursorSync.None && config.scales[0]?.props.time) { config.setCursor({ - sync: { key: syncKey, scales: ['x', 'y'] }, + sync: { + key: syncKey, + scales: + syncMode === DashboardCursorSync.Crosshair ? 
['x', 'y'] : ['x', null], + }, }); - // Show the horizontal crosshair only when the receiving panel shares - // the same y-axis unit as the source panel. When this panel is the - // source (cursor.event != null) the line is always shown and this - // panel's metadata is written to the registry so receivers can read it. - config.addHook('setCursor', (u: uPlot): void => { - const yCursorEl = u.root.querySelector('.u-cursor-y'); - if (!yCursorEl) { - return; - } - - if (u.cursor.event != null) { - // This panel is the source — publish metadata and always show line. - syncCursorRegistry.setMetadata(syncKey, syncMetadata); - yCursorEl.style.display = ''; - } else { - // This panel is receiving sync — show only if units match. - const sourceMeta = syncCursorRegistry.getMetadata(syncKey); - yCursorEl.style.display = - sourceMeta?.yAxisUnit === syncMetadata?.yAxisUnit ? '' : 'none'; - } - }); + removeSyncDisplayHook = config.addHook( + 'setCursor', + createSyncDisplayHook(syncKey, syncMetadata, controller), + ); } // Dismiss the tooltip when the user clicks / presses a key @@ -140,7 +128,12 @@ export default function TooltipPlugin({ const onOutsideInteraction = (event: Event): void => { const target = event.target as Node; if (!containerRef.current?.contains(target)) { - dismissTooltip(); + // Don't dismiss if the click landed inside any other pinned tooltip. + const isInsideAnyPinnedTooltip = + (target as Element).closest?.('[data-pinned="true"]') != null; + if (!isInsideAnyPinnedTooltip) { + dismissTooltip(); + } } }; @@ -206,6 +199,16 @@ export default function TooltipPlugin({ if (!controller.hoverActive || !plot) { return null; } + // In Tooltip sync mode, suppress the receiver tooltip entirely when + // no receiver series match the source panel's focused series. 
+ if ( + syncTooltipWithDashboard && + controller.cursorDrivenBySync && + Array.isArray(controller.syncedSeriesIndexes) && + controller.syncedSeriesIndexes.length === 0 + ) { + return null; + } return renderRef.current({ uPlotInstance: plot, dataIndexes: controller.seriesIndexes, @@ -213,6 +216,7 @@ export default function TooltipPlugin({ isPinned: controller.pinned, dismiss: dismissTooltip, viaSync: controller.cursorDrivenBySync, + syncedSeriesIndexes: controller.syncedSeriesIndexes, }); } @@ -443,6 +447,7 @@ export default function TooltipPlugin({ removeSetSeriesHook(); removeSetLegendHook(); removeSetCursorHook(); + removeSyncDisplayHook?.(); if (overClickHandler) { const plot = getPlot(controller); plot?.over.removeEventListener('click', overClickHandler); @@ -505,7 +510,7 @@ export default function TooltipPlugin({ isHovering, contents, ]); - const isTooltipVisible = isHovering || tooltipBody != null; + const isTooltipVisible = tooltipBody != null; if (!hasPlot) { return null; diff --git a/frontend/src/lib/uPlotV2/plugins/TooltipPlugin/syncCursorRegistry.ts b/frontend/src/lib/uPlotV2/plugins/TooltipPlugin/syncCursorRegistry.ts index f96732bd79..ffed4c88c7 100644 --- a/frontend/src/lib/uPlotV2/plugins/TooltipPlugin/syncCursorRegistry.ts +++ b/frontend/src/lib/uPlotV2/plugins/TooltipPlugin/syncCursorRegistry.ts @@ -9,9 +9,13 @@ import type { TooltipSyncMetadata } from './types'; * * Receivers use this to make decisions such as: * - Whether to show the horizontal crosshair line (matching yAxisUnit) - * - Future: what to render inside the tooltip (matching groupBy, etc.) 
+ * - Which series to highlight when panels share the same groupBy */ const metadataBySyncKey = new Map(); +const activeSeriesMetricBySyncKey = new Map< + string, + Record | null +>(); export const syncCursorRegistry = { setMetadata(syncKey: string, metadata: TooltipSyncMetadata | undefined): void { @@ -21,4 +25,15 @@ export const syncCursorRegistry = { getMetadata(syncKey: string): TooltipSyncMetadata | undefined { return metadataBySyncKey.get(syncKey); }, + + setActiveSeriesMetric( + syncKey: string, + metric: Record | null, + ): void { + activeSeriesMetricBySyncKey.set(syncKey, metric); + }, + + getActiveSeriesMetric(syncKey: string): Record | null { + return activeSeriesMetricBySyncKey.get(syncKey) ?? null; + }, }; diff --git a/frontend/src/lib/uPlotV2/plugins/TooltipPlugin/syncDisplayHook.ts b/frontend/src/lib/uPlotV2/plugins/TooltipPlugin/syncDisplayHook.ts new file mode 100644 index 0000000000..9ef2ba8859 --- /dev/null +++ b/frontend/src/lib/uPlotV2/plugins/TooltipPlugin/syncDisplayHook.ts @@ -0,0 +1,185 @@ +import uPlot from 'uplot'; + +import type { ExtendedSeries } from '../../config/types'; +import { syncCursorRegistry } from './syncCursorRegistry'; +import type { TooltipControllerState, TooltipSyncMetadata } from './types'; + +/** + * Returns the dimension keys present in both groupBy arrays. + * An empty result means no overlap — series highlighting should not run. 
+ * + * exact [A, B] vs [A, B] → [A, B] one match + * subset [A] vs [A, B] → [A] multiple receiver series may match + * superset [A, B] vs [A] → [A] one receiver series matches + * partial [A, B] vs [B, C] → [B] + */ +function getCommonGroupByKeys( + a: TooltipSyncMetadata['groupBy'], + b: TooltipSyncMetadata['groupBy'], +): string[] { + if ( + !Array.isArray(a) || + a.length === 0 || + !Array.isArray(b) || + b.length === 0 + ) { + return []; + } + const bKeys = new Set(b.map((g) => g.key)); + return a.filter((g) => bKeys.has(g.key)).map((g) => g.key); +} + +/** + * Returns the 1-based indexes of every series whose metric matches + * sourceMetric on all commonKeys. + */ +function findMatchingSeriesIndexes( + series: uPlot.Series[], + sourceMetric: Record, + commonKeys: string[], +): number[] { + return series.reduce((acc, s, i) => { + if (i === 0) { + return acc; + } + const metric = (s as ExtendedSeries).metric; + if ( + metric != null && + commonKeys.every((key) => metric[key] === sourceMetric[key]) + ) { + acc.push(i); + } + return acc; + }, []); +} + +function applySourceSync({ + uPlotInstance, + syncKey, + syncMetadata, + focusedSeriesIndex, +}: { + uPlotInstance: uPlot; + syncKey: string; + syncMetadata: TooltipSyncMetadata | undefined; + focusedSeriesIndex: number | null; +}): void { + syncCursorRegistry.setMetadata(syncKey, syncMetadata); + const focusedSeries = + focusedSeriesIndex != null + ? (uPlotInstance.series[focusedSeriesIndex] as ExtendedSeries) + : null; + syncCursorRegistry.setActiveSeriesMetric( + syncKey, + focusedSeries?.metric ?? 
null, + ); +} + +/** + * Returns: + * null – no groupBy filtering configured or cursor off-chart (no-op for tooltip) + * [] – groupBy configured but no receiver series match the source (hide synced tooltip) + * number[] – 1-based indexes of matching receiver series (show only these) + */ +function applyReceiverSync({ + uPlotInstance, + yCrosshairEl, + syncKey, + syncMetadata, + sourceMetadata, + commonKeys, +}: { + uPlotInstance: uPlot; + yCrosshairEl: HTMLElement; + syncKey: string; + syncMetadata: TooltipSyncMetadata | undefined; + sourceMetadata: TooltipSyncMetadata | undefined; + commonKeys: string[]; +}): number[] | null { + yCrosshairEl.style.display = + sourceMetadata?.yAxisUnit === syncMetadata?.yAxisUnit ? '' : 'none'; + + if (commonKeys.length === 0) { + return null; + } + + if ((uPlotInstance.cursor.left ?? -1) < 0) { + uPlotInstance.setSeries(null, { focus: false }); + return null; + } + + const sourceSeriesMetric = syncCursorRegistry.getActiveSeriesMetric(syncKey); + if (sourceSeriesMetric == null) { + uPlotInstance.setSeries(null, { focus: false }); + return []; + } + + const matchingIdxs = findMatchingSeriesIndexes( + uPlotInstance.series, + sourceSeriesMetric, + commonKeys, + ); + + if (matchingIdxs.length === 0) { + uPlotInstance.setSeries(null, { focus: false }); + return []; + } + + uPlotInstance.setSeries(matchingIdxs[0], { focus: true }); + + return matchingIdxs; +} + +export function createSyncDisplayHook( + syncKey: string, + syncMetadata: TooltipSyncMetadata | undefined, + controller: TooltipControllerState, +): (u: uPlot) => void { + // Cached once — avoids a DOM query on every cursor move. + let yCrosshairEl: HTMLElement | null = null; + + // groupBy on both panels is stable (set at config time). Recompute the + // intersection only when the source panel's groupBy reference changes. 
+ let lastSourceGroupBy: TooltipSyncMetadata['groupBy']; + let cachedCommonKeys: string[] = []; + + return (u: uPlot): void => { + yCrosshairEl ??= u.root.querySelector('.u-cursor-y'); + if (!yCrosshairEl) { + return; + } + + if (u.cursor.event != null) { + controller.syncedSeriesIndexes = null; + applySourceSync({ + uPlotInstance: u, + syncKey, + syncMetadata, + focusedSeriesIndex: controller.focusedSeriesIndex, + }); + yCrosshairEl.style.display = ''; + return; + } + + // Read metadata once and pass it down — avoids a second registry lookup + // inside applyReceiverSync. + const sourceMetadata = syncCursorRegistry.getMetadata(syncKey); + + if (sourceMetadata?.groupBy !== lastSourceGroupBy) { + lastSourceGroupBy = sourceMetadata?.groupBy; + cachedCommonKeys = getCommonGroupByKeys( + sourceMetadata?.groupBy, + syncMetadata?.groupBy, + ); + } + + controller.syncedSeriesIndexes = applyReceiverSync({ + uPlotInstance: u, + yCrosshairEl, + syncKey, + syncMetadata, + sourceMetadata, + commonKeys: cachedCommonKeys, + }); + }; +} diff --git a/frontend/src/lib/uPlotV2/plugins/TooltipPlugin/tooltipController.ts b/frontend/src/lib/uPlotV2/plugins/TooltipPlugin/tooltipController.ts index 88ee312099..87a69d3ea4 100644 --- a/frontend/src/lib/uPlotV2/plugins/TooltipPlugin/tooltipController.ts +++ b/frontend/src/lib/uPlotV2/plugins/TooltipPlugin/tooltipController.ts @@ -27,6 +27,7 @@ export function createInitialControllerState(): TooltipControllerState { verticalOffset: 0, seriesIndexes: [], focusedSeriesIndex: null, + syncedSeriesIndexes: null, cursorDrivenBySync: false, plotWithinViewport: false, windowWidth: window.innerWidth - WINDOW_OFFSET, @@ -184,7 +185,7 @@ export function createSetLegendHandler( return; } - const newSeriesIndexes = plot.cursor.idxs.slice(); + const newSeriesIndexes = [...plot.cursor.idxs]; const isAnySeriesActive = newSeriesIndexes.some((v, i) => i > 0 && v != null); const previousCursorDrivenBySync = controller.cursorDrivenBySync; diff --git 
a/frontend/src/lib/uPlotV2/plugins/TooltipPlugin/types.ts b/frontend/src/lib/uPlotV2/plugins/TooltipPlugin/types.ts index 78d682dd5b..03c453dcc8 100644 --- a/frontend/src/lib/uPlotV2/plugins/TooltipPlugin/types.ts +++ b/frontend/src/lib/uPlotV2/plugins/TooltipPlugin/types.ts @@ -4,6 +4,7 @@ import type { ReactNode, RefObject, } from 'react'; +import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse'; import type uPlot from 'uplot'; import type { TooltipRenderArgs } from '../../components/types'; @@ -39,6 +40,7 @@ export interface TooltipLayoutInfo { export interface TooltipSyncMetadata { yAxisUnit?: string; + groupBy?: BaseAutocompleteData[]; } export interface TooltipPluginProps { @@ -95,6 +97,11 @@ export interface TooltipControllerState { verticalOffset: number; seriesIndexes: Array; focusedSeriesIndex: number | null; + /** Receiver-side series filtering for Tooltip sync mode. + * null = no filtering (source panel or no groupBy configured) + * [] = no matching series found → hide the synced tooltip + * [...] 
= only these 1-based series indexes should appear in the synced tooltip */ + syncedSeriesIndexes: number[] | null; cursorDrivenBySync: boolean; plotWithinViewport: boolean; windowWidth: number; diff --git a/frontend/src/lib/uPlotV2/plugins/__tests__/TooltipPlugin.test.ts b/frontend/src/lib/uPlotV2/plugins/__tests__/TooltipPlugin.test.ts index 034313054e..a40b6176c9 100644 --- a/frontend/src/lib/uPlotV2/plugins/__tests__/TooltipPlugin.test.ts +++ b/frontend/src/lib/uPlotV2/plugins/__tests__/TooltipPlugin.test.ts @@ -210,7 +210,7 @@ describe('TooltipPlugin', () => { expect(container.parentElement).toBe(document.body); const fullscreenRoot = document.createElement('div'); - document.body.appendChild(fullscreenRoot); + document.body.append(fullscreenRoot); act(() => { mockedFullscreenElement = fullscreenRoot; @@ -252,7 +252,7 @@ describe('TooltipPlugin', () => { renderAndActivateHover(config, undefined, { canPinTooltip: true }); const container = screen.getByTestId('tooltip-plugin-container'); - expect(container.getAttribute('data-pinned') === 'true').toBe(false); + expect(container.dataset.pinned === 'true').toBe(false); act(() => { document.body.dispatchEvent( @@ -266,7 +266,7 @@ describe('TooltipPlugin', () => { return waitFor(() => { const updated = screen.getByTestId('tooltip-plugin-container'); expect(updated).toBeInTheDocument(); - expect(updated.getAttribute('data-pinned') === 'true').toBe(true); + expect(updated.dataset.pinned === 'true').toBe(true); }); }); @@ -340,7 +340,7 @@ describe('TooltipPlugin', () => { // Wait until the tooltip is actually pinned. 
await waitFor(() => { const container = screen.getByTestId('tooltip-plugin-container'); - expect(container.getAttribute('data-pinned') === 'true').toBe(true); + expect(container.dataset.pinned === 'true').toBe(true); }); const button = await screen.findByRole('button', { name: 'Dismiss' }); @@ -353,7 +353,7 @@ describe('TooltipPlugin', () => { expect(container).toBeInTheDocument(); expect(container.getAttribute('aria-hidden')).toBe('true'); - expect(container.getAttribute('data-pinned') === 'true').toBe(false); + expect(container.dataset.pinned === 'true').toBe(false); expect(container.textContent).toBe(''); }); }); @@ -391,9 +391,7 @@ describe('TooltipPlugin', () => { }); expect( - screen - .getByTestId('tooltip-plugin-container') - .getAttribute('data-pinned') === 'true', + screen.getByTestId('tooltip-plugin-container').dataset.pinned === 'true', ).toBe(true); // Simulate data update – should dismiss the pinned tooltip. @@ -405,7 +403,7 @@ describe('TooltipPlugin', () => { const container = screen.getByTestId('tooltip-plugin-container'); expect(container).toBeInTheDocument(); expect(container.getAttribute('aria-hidden')).toBe('true'); - expect(container.getAttribute('data-pinned') === 'true').toBe(false); + expect(container.dataset.pinned === 'true').toBe(false); jest.useRealTimers(); }); @@ -443,9 +441,7 @@ describe('TooltipPlugin', () => { }); expect( - screen - .getByTestId('tooltip-plugin-container') - .getAttribute('data-pinned') === 'true', + screen.getByTestId('tooltip-plugin-container').dataset.pinned === 'true', ).toBe(true); // Click outside the tooltip container. 
@@ -459,7 +455,7 @@ describe('TooltipPlugin', () => { expect(container).toBeInTheDocument(); expect(container.getAttribute('aria-hidden')).toBe('true'); - expect(container.getAttribute('data-pinned') === 'true').toBe(false); + expect(container.dataset.pinned === 'true').toBe(false); }); jest.useRealTimers(); @@ -498,9 +494,7 @@ describe('TooltipPlugin', () => { }); expect( - screen - .getByTestId('tooltip-plugin-container') - .getAttribute('data-pinned') === 'true', + screen.getByTestId('tooltip-plugin-container').dataset.pinned === 'true', ).toBe(true); // Press Escape to release. @@ -515,7 +509,7 @@ describe('TooltipPlugin', () => { const container = screen.getByTestId('tooltip-plugin-container'); expect(container).toBeInTheDocument(); expect(container.getAttribute('aria-hidden')).toBe('true'); - expect(container.getAttribute('data-pinned') === 'true').toBe(false); + expect(container.dataset.pinned === 'true').toBe(false); }); jest.useRealTimers(); @@ -541,9 +535,7 @@ describe('TooltipPlugin', () => { await waitFor(() => { expect( - screen - .getByTestId('tooltip-plugin-container') - .getAttribute('data-pinned') === 'true', + screen.getByTestId('tooltip-plugin-container').dataset.pinned === 'true', ).toBe(true); }); @@ -560,7 +552,7 @@ describe('TooltipPlugin', () => { await waitFor(() => { const container = screen.getByTestId('tooltip-plugin-container'); - expect(container.getAttribute('data-pinned') === 'true').toBe(false); + expect(container.dataset.pinned === 'true').toBe(false); }); jest.useRealTimers(); @@ -580,7 +572,7 @@ describe('TooltipPlugin', () => { const container = screen.getByTestId('tooltip-plugin-container'); // Tooltip should still be hovering (visible), not dismissed. 
expect(container.getAttribute('aria-hidden')).toBe('false'); - expect(container.getAttribute('data-pinned') === 'true').toBe(false); + expect(container.dataset.pinned === 'true').toBe(false); }); it('does not unpin on arbitrary keys that are not Escape or the pin key', async () => { @@ -603,9 +595,7 @@ describe('TooltipPlugin', () => { await waitFor(() => { expect( - screen - .getByTestId('tooltip-plugin-container') - .getAttribute('data-pinned') === 'true', + screen.getByTestId('tooltip-plugin-container').dataset.pinned === 'true', ).toBe(true); }); @@ -619,9 +609,7 @@ describe('TooltipPlugin', () => { await waitFor(() => { expect( - screen - .getByTestId('tooltip-plugin-container') - .getAttribute('data-pinned') === 'true', + screen.getByTestId('tooltip-plugin-container').dataset.pinned === 'true', ).toBe(true); }); @@ -665,7 +653,7 @@ describe('TooltipPlugin', () => { }); const container = screen.getByTestId('tooltip-plugin-container'); - expect(container.getAttribute('data-pinned') === 'true').toBe(false); + expect(container.dataset.pinned === 'true').toBe(false); }); it('does not pin when hover is not active', () => { @@ -699,7 +687,7 @@ describe('TooltipPlugin', () => { // The container exists once the plot is initialised, but it should // be hidden and not pinned since hover was never activated. 
const container = screen.getByTestId('tooltip-plugin-container'); - expect(container.getAttribute('data-pinned') === 'true').toBe(false); + expect(container.dataset.pinned === 'true').toBe(false); expect(container.getAttribute('aria-hidden')).toBe('true'); }); @@ -723,9 +711,7 @@ describe('TooltipPlugin', () => { await waitFor(() => { expect( - screen - .getByTestId('tooltip-plugin-container') - .getAttribute('data-pinned') === 'true', + screen.getByTestId('tooltip-plugin-container').dataset.pinned === 'true', ).toBe(false); }); @@ -738,9 +724,7 @@ describe('TooltipPlugin', () => { await waitFor(() => { expect( - screen - .getByTestId('tooltip-plugin-container') - .getAttribute('data-pinned') === 'true', + screen.getByTestId('tooltip-plugin-container').dataset.pinned === 'true', ).toBe(true); }); }); @@ -796,7 +780,7 @@ describe('TooltipPlugin', () => { // ---- Cursor sync ------------------------------------------------------------ describe('cursor sync', () => { - it('enables uPlot cursor sync for time-based scales when mode is Tooltip', () => { + it('enables uPlot cursor sync on x-axis only when mode is Tooltip', () => { const config = createConfigMock(); const setCursorSpy = jest.spyOn(config, 'setCursor'); config.addScale({ scaleKey: 'x', time: true }); @@ -810,6 +794,25 @@ describe('TooltipPlugin', () => { }), ); + expect(setCursorSpy).toHaveBeenCalledWith({ + sync: { key: 'dashboard-sync', scales: ['x', null] }, + }); + }); + + it('enables uPlot cursor sync on both axes when mode is Crosshair', () => { + const config = createConfigMock(); + const setCursorSpy = jest.spyOn(config, 'setCursor'); + config.addScale({ scaleKey: 'x', time: true }); + + render( + React.createElement(TooltipPlugin, { + config, + render: () => null, + syncMode: DashboardCursorSync.Crosshair, + syncKey: 'dashboard-sync', + }), + ); + expect(setCursorSpy).toHaveBeenCalledWith({ sync: { key: 'dashboard-sync', scales: ['x', 'y'] }, }); From 9e5678d6b34093acb8748d63c11405fd6528620f Mon 
Sep 17 00:00:00 2001 From: Piyush Singariya Date: Tue, 28 Apr 2026 14:19:44 +0530 Subject: [PATCH 05/19] CI: JSON QB E2E Integration Suite (#10863) * feat: enable JSON Path index * fix: contextual path index usage * test: fix unit tests * feat: align negative operators to include other logs * fix: primitive conditions working * fix: indexed tests passing * fix: array type filtering from dynamic arrays * fix: unit tests * fix: remove not used paths from testdata * fix: unit tests * fix: comment * fix: indexed unit tests * fix: array json element comparison * feat: change filtering of dynamic arrays * fix: dynamic array tests * fix: stringified integer value input * fix: better review for test file * fix: negative operator check * ci: lint changes * chore: import tests from older pr * fix: better tests * fix: logs.py * fix: body tests ready * fix: better validations * fix: dynamically change insert stmt for body_v2 availability * test: with higher migrator version * fix: type ambiguity * fix: test * test: updated validation * fix: tons of changes * chore: remove redundent comparison * ci: tests fixed * fix: upgraded collector version * fix: qbtoexpr tests * fix: go sum * chore: upgrade collector version v0.144.3-rc.4 * fix: tests * ci: test fix * revert: remove db binaries * test: selectField tests added * fix: added safeguards in plan generation * fix: name changed to field_map * chore: changes based on review * fix: changes based on review * fix: remove unused promoted fixture * test: integration test fix attempt 1 * fix: json access plan remval of AvailableTypes * fix: invalid index usage on terminal condition * test: added indexed tests separately * test: select order by tests added * fix: update jsontypeexporter * fix: branches should tell missing array types * fix: comment removed * ci: test with updated collector * fix: issue with FuzzyMatching and API failing * test: select orderby works * fix: int64 mapping * fix: replacing JSONIndex with 
TelemetryFieldKeyIndex * chore: update collector to stable release * chore: update migrator version to stable release * chore: error fix and unused code * chore: go mod tidy * chore: some changes in test to improve quality * test: enhanced tests to work with backtick required key * chore: comment removal * fix: change based on review * fix: change based on review * fix: openapi ci * ci: fix go tests * fix: index needs body prefix * fix: tests * ci: polluted test fix * fix: shift test files * refactor(telemetrytypes): replace JSONDataTypeIndex with TelemetryFieldKeySkipIndex * revert: mcp related changes * feat: check query log as fixture * fix: reuse querier fixtures * chore: rename jsontypeexporter.py * chore: py-fmt * fix: minor fix * fix: py-lint * fix: changes based on review * chore: fmt --- .github/workflows/integrationci.yaml | 3 +- tests/conftest.py | 3 +- tests/fixtures/clickhouse.py | 76 +- tests/fixtures/jsontypes.py | 441 +++++ tests/fixtures/logs.py | 91 +- tests/fixtures/migrator.py | 34 +- tests/fixtures/querier.py | 8 + .../01_logs_json_body_new_qb.py | 1467 +++++++++++++++++ .../tests/querier_json_body/__init__.py | 0 .../tests/querier_json_body/conftest.py | 67 + 10 files changed, 2160 insertions(+), 30 deletions(-) create mode 100644 tests/fixtures/jsontypes.py create mode 100644 tests/integration/tests/querier_json_body/01_logs_json_body_new_qb.py create mode 100644 tests/integration/tests/querier_json_body/__init__.py create mode 100644 tests/integration/tests/querier_json_body/conftest.py diff --git a/.github/workflows/integrationci.yaml b/.github/workflows/integrationci.yaml index e3ebcef680..a73bdf68ad 100644 --- a/.github/workflows/integrationci.yaml +++ b/.github/workflows/integrationci.yaml @@ -51,6 +51,7 @@ jobs: - role - rootuser - serviceaccount + - querier_json_body - ttl sqlstore-provider: - postgres @@ -61,7 +62,7 @@ jobs: - 25.5.6 - 25.12.5 schema-migrator-version: - - v0.142.0 + - v0.144.3 postgres-version: - 15 if: | diff --git 
a/tests/conftest.py b/tests/conftest.py index 496c50a85f..f3097d3340 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -23,6 +23,7 @@ pytest_plugins = [ "fixtures.notification_channel", "fixtures.alerts", "fixtures.cloudintegrations", + "fixtures.jsontypes", "fixtures.seeder", ] @@ -79,6 +80,6 @@ def pytest_addoption(parser: pytest.Parser): parser.addoption( "--schema-migrator-version", action="store", - default="v0.144.2", + default="v0.144.3", help="schema migrator version", ) diff --git a/tests/fixtures/clickhouse.py b/tests/fixtures/clickhouse.py index 3ec8613fda..8b44d20255 100644 --- a/tests/fixtures/clickhouse.py +++ b/tests/fixtures/clickhouse.py @@ -1,5 +1,6 @@ import os -from collections.abc import Generator +from collections.abc import Callable, Generator +from datetime import datetime from typing import Any import clickhouse_connect @@ -262,3 +263,76 @@ def clickhouse( delete=delete, restore=restore, ) + + +@pytest.fixture(name="check_query_log") +def check_query_log( + signoz: types.SigNoz, +) -> Callable[..., None]: + """ + Returns a callable that flushes system.query_log and asserts that at + least one recent SELECT satisfies check_fn. + + Args: + after_ts: Only consider queries logged after this timestamp. + case_name: Label used in assertion failure messages. + check_fn: Predicate run against each candidate query string. + tables: Filter to queries that touched all of these tables, as + 'db.table' strings (uses hasAll(tables, [...])). + must_contain: Substrings that must appear in the query text (AND-ed). + must_not_contain: Substrings that must not appear in the query text (AND-ed). + limit: How many most-recent queries to examine (default 10). + + Usage: + before = datetime.now(tz=timezone.utc) + # ... trigger the query under test ... 
+ check_query_log( + before, "my.case", + lambda q: "assumeNotNull" in q, + tables=["signoz_logs.distributed_logs_v2"], + ) + """ + + def _check( + after_ts: datetime, + case_name: str, + check_fn: Callable[[str], bool], + *, + tables: list[str] | None = None, + must_contain: list[str] | None = None, + must_not_contain: list[str] | None = None, + limit: int = 10, + ) -> None: + conn = signoz.telemetrystore.conn + conn.command("SYSTEM FLUSH LOGS") + + # Use millisecond precision to avoid timestamp collisions between + # adjacent test cases (second-level precision causes bleed-through). + params: dict = {"after_ms": int(after_ts.timestamp() * 1000)} + conditions = [ + "type = 'QueryFinish'", + "query_kind = 'Select'", + "toUnixTimestamp64Milli(event_time_microseconds) >= %(after_ms)s", + ] + if tables: + params["tables"] = tables + conditions.append("hasAll(tables, %(tables)s)") + for i, pattern in enumerate(must_contain or []): + key = f"mc_{i}" + params[key] = pattern + conditions.append(f"position(query, %({key})s) > 0") + for i, pattern in enumerate(must_not_contain or []): + key = f"mnc_{i}" + params[key] = pattern + conditions.append(f"position(query, %({key})s) = 0") + + where = " AND ".join(conditions) + result = conn.query( + f"SELECT query FROM system.query_log WHERE {where} ORDER BY event_time_microseconds DESC LIMIT {limit}", + parameters=params, + ) + queries = [row[0] for row in result.result_rows] + assert queries, f"No matching SELECT in system.query_log for case '{case_name}'" + assert all(check_fn(q) for q in queries), f"query_log check failed for case '{case_name}'.\n" + "Queries:\n" + "\n---\n".join(queries) + + return _check diff --git a/tests/fixtures/jsontypes.py b/tests/fixtures/jsontypes.py new file mode 100644 index 0000000000..d7d97ad092 --- /dev/null +++ b/tests/fixtures/jsontypes.py @@ -0,0 +1,441 @@ +""" +Simpler version of metadataexporter for exporting jsontypes for test fixtures. 
+This exports JSON type metadata to the path_types table by parsing JSON bodies +and extracting all paths with their types, similar to how the real metadataexporter works. +""" + +import datetime +import json +from abc import ABC +from collections.abc import Callable, Generator +from http import HTTPStatus +from typing import ( + Any, +) + +import numpy as np +import pytest +import requests + +from fixtures import types + + +class JSONPathType(ABC): + """Represents a JSON path with its type information""" + + field_name: str + field_data_type: str + last_seen: np.uint64 + signal: str = "logs" + field_context: str = "body" + + def __init__( + self, + field_name: str, + field_data_type: str, + last_seen: datetime.datetime | None = None, + ) -> None: + self.field_name = field_name + self.field_data_type = field_data_type + self.signal = "logs" + self.field_context = "body" + if last_seen is None: + last_seen = datetime.datetime.now() + self.last_seen = np.uint64(int(last_seen.timestamp() * 1e9)) + + def np_arr(self) -> np.array: + """Return path type data as numpy array for database insertion""" + return np.array([self.signal, self.field_context, self.field_name, self.field_data_type, self.last_seen]) + + +# Constants matching metadataexporter +ARRAY_SEPARATOR = "[]." # Used in paths like "education[].name" +ARRAY_SUFFIX = "[]" # Used when traversing into array element objects + + +def _infer_array_type_from_type_strings(types: list[str]) -> str | None: + """ + Infer array type from a list of pre-classified type strings. + Matches metadataexporter's inferArrayMask logic. 
+ + Internal type strings are: "JSON", "String", "Bool", "Float64", "Int64" + + SuperTyping rules (matching Go inferArrayMask): + - JSON alone → []json + - JSON + any primitive → []dynamic + - String alone → []string; String + other → []dynamic + - Float64 wins over Int64 and Bool + - Int64 wins over Bool + - Bool alone → []bool + """ + if len(types) == 0: + return None + + unique = set(types) + + has_json = "JSON" in unique + # hasPrimitive mirrors Go: (hasJSON && len(unique) > 1) || (!hasJSON && len(unique) > 0) + has_primitive = (has_json and len(unique) > 1) or (not has_json and len(unique) > 0) + + if has_json: + if not has_primitive: + return "[]json" + return "[]dynamic" + + # ---- Primitive Type Resolution (Float > Int > Bool) ---- + if "String" in unique: + if len(unique) > 1: + return "[]dynamic" + return "[]string" + + if "Float64" in unique: + return "[]float64" + if "Int64" in unique: + return "[]int64" + if "Bool" in unique: + return "[]bool" + + return "[]dynamic" + + +def _infer_array_type(elements: list[Any]) -> str | None: + """ + Infer array type from raw Python list elements. + Classifies each element then delegates to _infer_array_type_from_type_strings. + """ + if len(elements) == 0: + return None + + types = [] + for elem in elements: + if elem is None: + continue + if isinstance(elem, dict): + types.append("JSON") + elif isinstance(elem, str): + types.append("String") + elif isinstance(elem, bool): # must be before int (bool is subclass of int) + types.append("Bool") + elif isinstance(elem, float): + types.append("Float64") + elif isinstance(elem, int): + types.append("Int64") + + return _infer_array_type_from_type_strings(types) + + +def _python_type_to_clickhouse_type(value: Any) -> str: + """ + Convert Python type to ClickHouse JSON type string. + Maps Python types to ClickHouse JSON data types. + Matches metadataexporter's mapPCommonValueTypeToDataType. 
+ """ + if isinstance(value, bool): + return "bool" + elif isinstance(value, int): + return "int64" + elif isinstance(value, float): + return "float64" + elif isinstance(value, str): + return "string" + elif isinstance(value, list): + # Use the sophisticated array type inference + array_type = _infer_array_type(value) + return array_type if array_type else "[]dynamic" + elif isinstance(value, dict): + return "json" + else: + return "string" # Default fallback + + +def _extract_json_paths( + obj: Any, + current_path: str = "", + path_types: dict[str, set[str]] | None = None, + level: int = 0, +) -> dict[str, set[str]]: + """ + Recursively extract all paths and their types from a JSON object. + Matches metadataexporter's analyzePValue logic. + + Args: + obj: The JSON object to traverse + current_path: Current path being built (e.g., "user.name") + path_types: Dictionary mapping paths to sets of types found + level: Current nesting level (for depth limiting) + + Returns: + Dictionary mapping paths to sets of type strings + """ + if path_types is None: + path_types = {} + + if obj is None: + # Skip null values — matches Go walkNode which errors on ValueTypeEmpty + return path_types + + if isinstance(obj, dict): + # For objects, recurse into keys without recording the object itself as a type. + # Matches Go walkMap which recurses without calling ta.record on the map node. 
+ + for key, value in obj.items(): + # Build the path for this key + if current_path: + new_path = f"{current_path}.{key}" + else: + new_path = key + + # Recurse into the value + _extract_json_paths(value, new_path, path_types, level + 1) + + elif isinstance(obj, list): + # Skip empty arrays + if len(obj) == 0: + return path_types + + # Collect types from array elements (matching Go: types := make([]pcommon.ValueType, 0, s.Len())) + types = [] + + for item in obj: + if isinstance(item, dict): + # When traversing into array element objects, use ArraySuffix ([]) + # This matches: prefix+ArraySuffix in the Go code + # Example: if current_path is "education", we use "education[]" to traverse into objects + array_prefix = current_path + ARRAY_SUFFIX if current_path else "" + for key, value in item.items(): + if array_prefix: + # Use array separator: education[].name + array_path = f"{array_prefix}.{key}" + else: + array_path = key + # Recurse without increasing level (matching Go behavior) + _extract_json_paths(value, array_path, path_types, level) + types.append("JSON") + elif isinstance(item, list): + # Arrays inside arrays are not supported - skip the whole path + # Matching Go: e.logger.Error("arrays inside arrays are not supported!", ...); return nil + return path_types + elif isinstance(item, str): + types.append("String") + elif isinstance(item, bool): + types.append("Bool") + elif isinstance(item, float): + types.append("Float64") + elif isinstance(item, int): + types.append("Int64") + + # Infer array type from collected types (matching Go: if mask := inferArrayMask(types); mask != 0) + if len(types) > 0: + array_type = _infer_array_type_from_type_strings(types) + if array_type and current_path: + if current_path not in path_types: + path_types[current_path] = set() + path_types[current_path].add(array_type) + + # Primitive value (string, number, bool) + elif current_path: + if current_path not in path_types: + path_types[current_path] = set() + obj_type = 
_python_type_to_clickhouse_type(obj) + path_types[current_path].add(obj_type) + + return path_types + + +def _parse_json_bodies_and_extract_paths( + json_bodies: list[str], + timestamp: datetime.datetime | None = None, +) -> list[JSONPathType]: + """ + Parse JSON bodies and extract all paths with their types. + This mimics the behavior of metadataexporter. + + Args: + json_bodies: List of JSON body strings to parse + timestamp: Timestamp to use for last_seen (defaults to now) + + Returns: + List of JSONPathType objects with all discovered paths and types + """ + if timestamp is None: + timestamp = datetime.datetime.now() + + # Aggregate all paths and their types across all JSON bodies + all_path_types: dict[str, set[str]] = {} + + for json_body in json_bodies: + try: + parsed = json.loads(json_body) + _extract_json_paths(parsed, "", all_path_types, level=0) + except (json.JSONDecodeError, TypeError): + # Skip invalid JSON + continue + + # Convert to list of JSONPathType objects + # Each path can have multiple types, so we create one JSONPathType per type + path_type_objects: list[JSONPathType] = [] + for path, types_set in all_path_types.items(): + for type_str in types_set: + path_type_objects.append(JSONPathType(field_name=path, field_data_type=type_str, last_seen=timestamp)) + + return path_type_objects + + +@pytest.fixture(name="export_json_types", scope="function") +def export_json_types( + clickhouse: types.TestContainerClickhouse, +) -> Generator[Callable[[list[JSONPathType] | list[str] | list[Any]], None], Any]: + """ + Fixture for exporting JSON type metadata to the path_types table. + This is a simpler version of metadataexporter for test fixtures. + + The function can accept: + 1. List of JSONPathType objects (manual specification) + 2. List of JSON body strings (auto-extract paths) + 3. 
List of Logs objects (extract from body_json field) + + Usage examples: + # Manual specification + export_json_types([ + JSONPathType(field_name="user.name", field_data_type="string"), + JSONPathType(field_name="user.age", field_data_type="int64"), + ]) + + # Auto-extract from JSON strings + export_json_types([ + '{"user": {"name": "alice", "age": 25}}', + '{"user": {"name": "bob", "age": 30}}', + ]) + + # Auto-extract from Logs objects + export_json_types(logs_list) + """ + + def _export_json_types( + data: list[JSONPathType] | list[str] | list[Any], # List[Logs] but avoiding circular import + ) -> None: + """ + Export JSON type metadata to signoz_metadata.distributed_field_keys table. + This table stores signal, context, path, and type information for body JSON fields. + """ + path_types: list[JSONPathType] = [] + + if len(data) == 0: + return + + # Determine input type and convert to JSONPathType list + first_item = data[0] + + if isinstance(first_item, JSONPathType): + # Already JSONPathType objects + path_types = data # type: ignore + elif isinstance(first_item, str): + # List of JSON strings - parse and extract paths + path_types = _parse_json_bodies_and_extract_paths(data) # type: ignore + else: + # Assume it's a list of Logs objects - extract body_v2 + json_bodies: list[str] = [] + for log in data: # type: ignore + # Try to get body_v2 attribute + if hasattr(log, "body_v2") and log.body_v2: + json_bodies.append(log.body_v2) + elif hasattr(log, "body") and log.body: + # Fallback to body if body_v2 not available + try: + # Try to parse as JSON + json.loads(log.body) + json_bodies.append(log.body) + except (json.JSONDecodeError, TypeError): + pass + + if json_bodies: + path_types = _parse_json_bodies_and_extract_paths(json_bodies) + + if len(path_types) == 0: + return + + clickhouse.conn.insert( + database="signoz_metadata", + table="distributed_field_keys", + data=[path_type.np_arr() for path_type in path_types], + column_names=[ + "signal", + 
"field_context", + "field_name", + "field_data_type", + "last_seen", + ], + ) + + yield _export_json_types + + # Cleanup - truncate the local table after tests (following pattern from logs fixture) + clickhouse.conn.query(f"TRUNCATE TABLE signoz_metadata.field_keys ON CLUSTER '{clickhouse.env['SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER']}' SYNC") + + +@pytest.fixture(name="create_json_index", scope="function") +def create_json_index( + signoz: types.SigNoz, +) -> Generator[Callable[[str, list[dict[str, Any]]], None]]: + """ + Create ClickHouse data-skipping indexes on body_v2 JSON sub-columns via + POST /api/v1/logs/promote_paths. + + **Must be called BEFORE insert_logs** so that newly inserted data parts are + covered by the index and the QB uses the indexed condition path. + + Each entry in `paths` follows the PromotePath API shape: + { + "path": "body.user.name", # must start with "body." + "indexes": [ + { + "fieldDataType": "string", # string | int64 | float64 + "type": "ngrambf_v1(3, 256, 2, 0)", # or "minmax", "tokenbf_v1(...)" + "granularity": 1, + } + ], + } + + Teardown drops every index created during the test by querying + system.data_skipping_indices for matching expressions. + + Example:: + + def test_foo(signoz, get_token, insert_logs, export_json_types, create_json_body_index): + token = get_token(...) 
+ export_json_types(logs_list) + create_json_body_index(token, [ + {"path": "body.user.name", + "indexes": [{"fieldDataType": "string", "type": "ngrambf_v1(3, 256, 2, 0)", "granularity": 1}]}, + {"path": "body.user.age", + "indexes": [{"fieldDataType": "int64", "type": "minmax", "granularity": 1}]}, + ]) + insert_logs(logs_list) # data inserted after index exists — index is built automatically + """ + created_paths: list[str] = [] + + def _create_json_body_index(token: str, paths: list[dict[str, Any]]) -> None: + response = requests.post( + signoz.self.host_configs["8080"].get("/api/v1/logs/promote_paths"), + headers={"authorization": f"Bearer {token}"}, + json=paths, + timeout=30, + ) + assert response.status_code == HTTPStatus.CREATED, f"Failed to create JSON body indexes: {response.status_code} {response.text}" + for path in paths: + # The API strips the "body." prefix before storing — mirror that here + # so our cleanup query uses the bare path (e.g. "user.name"). + raw = path["path"].removeprefix("body.") + if raw not in created_paths: + created_paths.append(raw) + + yield _create_json_body_index + + if not created_paths: + return + + cluster = signoz.telemetrystore.env["SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER"] + for path in created_paths: + result = signoz.telemetrystore.conn.query(f"SELECT name FROM system.data_skipping_indices WHERE database = 'signoz_logs' AND table = 'logs_v2' AND expr LIKE '%{path}%'") + for (index_name,) in result.result_rows: + signoz.telemetrystore.conn.query(f"ALTER TABLE signoz_logs.logs_v2 ON CLUSTER '{cluster}' DROP INDEX IF EXISTS `{index_name}`") diff --git a/tests/fixtures/logs.py b/tests/fixtures/logs.py index 1ec32fa187..f0fc93d59a 100644 --- a/tests/fixtures/logs.py +++ b/tests/fixtures/logs.py @@ -121,6 +121,8 @@ class Logs(ABC): resources: dict[str, Any] = {}, attributes: dict[str, Any] = {}, body: str = "default body", + body_v2: str | None = None, + body_promoted: str | None = None, severity_text: str = "INFO", 
trace_id: str = "", span_id: str = "", @@ -166,6 +168,33 @@ class Logs(ABC): # Set body self.body = body + # Set body_v2 - if body is JSON, parse and stringify it, otherwise use empty string + # ClickHouse accepts String input for JSON column + if body_v2 is not None: + self.body_v2 = body_v2 + else: + # Try to parse body as JSON; if successful use it directly, + # otherwise wrap as {"message": body} matching the normalize operator behavior. + try: + json.loads(body) + self.body_v2 = body + except (json.JSONDecodeError, TypeError): + self.body_v2 = json.dumps({"message": body}) + + # Set body_promoted - must be valid JSON + # Tests will explicitly pass promoted column's content, but we validate it + if body_promoted is not None: + # Validate that it's valid JSON + try: + json.loads(body_promoted) + self.body_promoted = body_promoted + except (json.JSONDecodeError, TypeError): + # If invalid, default to empty JSON object + self.body_promoted = "{}" + else: + # Default to empty JSON object (valid JSON) + self.body_promoted = "{}" + # Process resources and attributes self.resources_string = {k: str(v) for k, v in resources.items()} self.resource_json = {} if resource_write_mode == "legacy_only" else dict(self.resources_string) @@ -309,6 +338,8 @@ class Logs(ABC): self.severity_text, self.severity_number, self.body, + self.body_v2, + self.body_promoted, self.attributes_string, self.attributes_number, self.attributes_bool, @@ -436,31 +467,47 @@ def insert_logs_to_clickhouse(conn, logs: list[Logs]) -> None: data=[resource_key.np_arr() for resource_key in resource_keys], ) + all_column_names = [ + "ts_bucket_start", + "resource_fingerprint", + "timestamp", + "observed_timestamp", + "id", + "trace_id", + "span_id", + "trace_flags", + "severity_text", + "severity_number", + "body", + "body_v2", + "body_promoted", + "attributes_string", + "attributes_number", + "attributes_bool", + "resources_string", + "scope_name", + "scope_version", + "scope_string", + "resource", + ] + + 
result = conn.query("SELECT count() FROM system.columns WHERE database = 'signoz_logs' AND table = 'logs_v2' AND name = 'body_v2'") + has_json_body = result.result_rows[0][0] > 0 + + if has_json_body: + column_names = all_column_names + data = [log.np_arr() for log in logs] + else: + json_body_cols = {"body_v2", "body_promoted"} + keep_indices = [i for i, c in enumerate(all_column_names) if c not in json_body_cols] + column_names = [all_column_names[i] for i in keep_indices] + data = [log.np_arr()[keep_indices] for log in logs] + conn.insert( database="signoz_logs", table="distributed_logs_v2", - data=[log.np_arr() for log in logs], - column_names=[ - "ts_bucket_start", - "resource_fingerprint", - "timestamp", - "observed_timestamp", - "id", - "trace_id", - "span_id", - "trace_flags", - "severity_text", - "severity_number", - "body", - "attributes_string", - "attributes_number", - "attributes_bool", - "resources_string", - "scope_name", - "scope_version", - "scope_string", - "resource", - ], + data=data, + column_names=column_names, ) diff --git a/tests/fixtures/migrator.py b/tests/fixtures/migrator.py index 689c38f4d8..5b3869741e 100644 --- a/tests/fixtures/migrator.py +++ b/tests/fixtures/migrator.py @@ -8,27 +8,32 @@ from fixtures.logger import setup_logger logger = setup_logger(__name__) -@pytest.fixture(name="migrator", scope="package") -def migrator( +def create_migrator( network: Network, clickhouse: types.TestContainerClickhouse, request: pytest.FixtureRequest, pytestconfig: pytest.Config, + cache_key: str = "migrator", + env_overrides: dict | None = None, ) -> types.Operation: """ - Package-scoped fixture for running schema migrations. + Factory function for running schema migrations. + Accepts optional env_overrides to customize the migrator environment. 
""" def create() -> None: version = request.config.getoption("--schema-migrator-version") client = docker.from_env() + environment = dict(env_overrides) if env_overrides else {} + container = client.containers.run( image=f"signoz/signoz-schema-migrator:{version}", command=f"sync --replication=true --cluster-name=cluster --up= --dsn={clickhouse.env['SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN']}", detach=True, auto_remove=False, network=network.id, + environment=environment, ) result = container.wait() @@ -47,6 +52,7 @@ def migrator( detach=True, auto_remove=False, network=network.id, + environment=environment, ) result = container.wait() @@ -59,7 +65,7 @@ def migrator( container.remove() - return types.Operation(name="migrator") + return types.Operation(name=cache_key) def delete(_: types.Operation) -> None: pass @@ -70,9 +76,27 @@ def migrator( return reuse.wrap( request, pytestconfig, - "migrator", + cache_key, lambda: types.Operation(name=""), create, delete, restore, ) + + +@pytest.fixture(name="migrator", scope="package") +def migrator( + network: Network, + clickhouse: types.TestContainerClickhouse, + request: pytest.FixtureRequest, + pytestconfig: pytest.Config, +) -> types.Operation: + """ + Package-scoped fixture for running schema migrations. + """ + return create_migrator( + network=network, + clickhouse=clickhouse, + request=request, + pytestconfig=pytestconfig, + ) diff --git a/tests/fixtures/querier.py b/tests/fixtures/querier.py index a7315ca35f..3415a4a8f7 100644 --- a/tests/fixtures/querier.py +++ b/tests/fixtures/querier.py @@ -500,6 +500,14 @@ def get_scalar_columns(response_json: dict) -> list[dict]: return results[0].get("columns", []) +def get_rows(response: requests.Response) -> list[dict[str, Any]]: + assert response.json()["status"] == "success" + results = response.json()["data"]["data"]["results"] + assert len(results) == 1 + # The server returns rows:null (not []) when there are 0 matching logs. 
+ return results[0].get("rows") or [] + + def get_column_data_from_response(response_json: dict, column_name: str) -> list[Any]: results = response_json.get("data", {}).get("data", {}).get("results", []) if not results: diff --git a/tests/integration/tests/querier_json_body/01_logs_json_body_new_qb.py b/tests/integration/tests/querier_json_body/01_logs_json_body_new_qb.py new file mode 100644 index 0000000000..8f893b8a5a --- /dev/null +++ b/tests/integration/tests/querier_json_body/01_logs_json_body_new_qb.py @@ -0,0 +1,1467 @@ +import json +from collections.abc import Callable +from datetime import UTC, datetime, timedelta +from typing import Any + +import requests + +from fixtures import types +from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD +from fixtures.logs import Logs +from fixtures.querier import ( + build_logs_aggregation, + build_order_by, + build_scalar_query, + get_column_data_from_response, + get_rows, + get_scalar_table_data, + make_query_request, +) + + +def _get_bodies(response: requests.Response) -> list[dict[str, Any]]: + return [json.loads(row["data"]["body"]) for row in get_rows(response)] + + +def _run_query_case(signoz: types.SigNoz, token: str, now: datetime, case: dict[str, Any]) -> None: + start_ms = case.get("startMs", int((now - timedelta(seconds=10)).timestamp() * 1000)) + end_ms = case.get("endMs", int(now.timestamp() * 1000)) + + aggregation = case.get("aggregation") + if aggregation and not isinstance(aggregation, list): + aggregations = [build_logs_aggregation(aggregation)] + elif aggregation: + aggregations = aggregation + else: + aggregations = [] + + order = case.get("order") + if order is None and case["requestType"] == "raw": + order = [build_order_by("timestamp", "desc")] + + query = build_scalar_query( + name=case["name"], + signal="logs", + aggregations=aggregations, + group_by=case.get("groupBy"), + order=order, + limit=case.get("limit", 100), + filter_expression=case.get("expression"), + 
step_interval=case.get("stepInterval") or 60, + ) + + response = make_query_request( + signoz=signoz, + token=token, + start_ms=start_ms, + end_ms=end_ms, + queries=[query], + request_type=case["requestType"], + ) + assert response.status_code == 200, f"HTTP {response.status_code} for case '{case['name']}': {response.text}" + assert case["validate"](response), f"Validation failed for case '{case['name']}': {response.json()}" + + +# ============================================================================ +# Primitive path operations +# ============================================================================ +# +# Data landscape (5 logs, 3 different services): +# log1 — auth-service: full user object, all fields, status=200 +# log2 — auth-service: partial user (no email, no height), status=401 +# log3 — api-gateway: full user object, different values, status=200 +# log4 — healthcheck: completely flat, no user object at all +# log5 — edge-case: user.age is a STRING "unknown" (type ambiguity) +# +# This ensures the QB handles: +# - queries against logs where the path exists vs doesn't exist +# - type ambiguity (user.age: Int64 in some, String in others) +# - structurally different logs in the same query window +# - sparse fields (email/height present in some, absent in others) +# ============================================================================ + + +def test_primitive_path_operations( + signoz: types.SigNoz, + create_user_admin: None, # pylint: disable=unused-argument + get_token: Callable[[str, str], str], + insert_logs: Callable[[list[Logs]], None], + export_json_types: Callable[[list[Logs]], None], +) -> None: + now = datetime.now(tz=UTC) + + # log1: auth-service — full structure + log1 = json.dumps( + { + "user": { + "name": "alice", + "age": 25, + "height": 5.4, + "active": True, + "email": "alice@test.com", + "address": {"zip": 110001}, + }, + "status": 200, + "http-status": 200, + } + ) + # log2: auth-service — partial user (no email, no 
height), different status + log2 = json.dumps( + { + "user": { + "name": "bob", + "age": 30, + "active": False, + "address": {"zip": 220002}, + }, + "status": 401, + "http-status": 401, + } + ) + # log3: api-gateway — full user, different values + log3 = json.dumps( + { + "user": { + "name": "charlie", + "age": 35, + "height": 6.1, + "active": True, + "address": {"zip": 330003}, + }, + "status": 200, + "http-status": 200, + } + ) + # log4: healthcheck — completely different structure, no user + log4 = json.dumps( + { + "message": "health check passed", + "status": 200, + } + ) + # log5: legacy-service — user.age is a STRING, not int (type ambiguity) + log5 = json.dumps( + { + "user": { + "name": "diana", + "age": "28", + "active": True, + }, + "status": 500, + } + ) + # log6: zero-service — all default/empty/falsy values + # user.name="" exercises NOT IN / String operator edge cases + # user.age=0 exercises int_lt (0 < 30) + # user.active=False exercises bool EXISTS (false ≠ NULL) + log6 = json.dumps( + { + "user": { + "name": "", + "age": 0, + "active": False, + }, + "status": 0, + } + ) + + logs_list = [ + Logs( + timestamp=now - timedelta(seconds=5), + resources={"service.name": "auth-service"}, + body_v2=log1, + body_promoted="", + severity_text="INFO", + ), + Logs( + timestamp=now - timedelta(seconds=4), + resources={"service.name": "auth-service"}, + body_v2=log2, + body_promoted="", + severity_text="ERROR", + ), + Logs( + timestamp=now - timedelta(seconds=3), + resources={"service.name": "api-gateway"}, + body_v2=log3, + body_promoted="", + severity_text="INFO", + ), + Logs( + timestamp=now - timedelta(seconds=2), + resources={"service.name": "healthcheck"}, + body_v2=log4, + body_promoted="", + severity_text="INFO", + ), + Logs( + timestamp=now - timedelta(seconds=1), + resources={"service.name": "legacy-service"}, + body_v2=log5, + body_promoted="", + severity_text="WARN", + ), + Logs( + timestamp=now - timedelta(milliseconds=500), + 
resources={"service.name": "zero-service"}, + body_v2=log6, + body_promoted="", + severity_text="DEBUG", + ), + ] + + export_json_types(logs_list) + insert_logs(logs_list) + token = get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD) + + cases = [ + # ── positive operators ───────────────────────────────────────────── + { + "name": "prim.string_equal", + "requestType": "raw", + "expression": 'user.name = "alice"', + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 1 and _get_bodies(r)[0]["user"]["name"] == "alice", + }, + # log1,log3,log4 have status=200 — log4 is flat with no user object + { + "name": "prim.int_equal_across_shapes", + "requestType": "raw", + "expression": "status = 200", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 3 and all(b["status"] == 200 for b in _get_bodies(r)), + }, + # height only exists in log1,log3 — tests comparison on sparse field + { + "name": "prim.float_gt_sparse_field", + "requestType": "raw", + "expression": "user.height > 5.8", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 1 and _get_bodies(r)[0]["user"]["height"] == 6.1, + }, + # user.age: Int64 in log1, String "28" in log5, Int64 0 in log6 — type ambiguity. + # Matches: log1 (25 < 30), log5 ("28" via type ambiguity), log6 (0 < 30) → 3 results. 
+ { + "name": "prim.int_lt_with_type_ambiguity", + "requestType": "raw", + "expression": "user.age < 30", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 3, + }, + # Bool has distinct handling (not IndexSupported); log4 has no active field + { + "name": "prim.bool_equal_true", + "requestType": "raw", + "expression": "user.active = true", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 3 and {b["user"]["name"] for b in _get_bodies(r)} == {"alice", "charlie", "diana"}, + }, + # CONTAINS uses ILIKE — distinct from = + { + "name": "prim.string_contains", + "requestType": "raw", + "expression": 'user.name CONTAINS "ali"', + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 1 and _get_bodies(r)[0]["user"]["name"] == "alice", + }, + # CONTAINS on Float uses toString() wrapping — distinct code path + { + "name": "prim.float_contains", + "requestType": "raw", + "expression": "user.height Contains 5.4", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 1 and _get_bodies(r)[0]["user"]["height"] == 5.4, + }, + # LIKE — distinct operator (sb.Like) + { + "name": "prim.string_like", + "requestType": "raw", + "expression": "user.name LIKE '%li%'", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 2 and {b["user"]["name"] for b in _get_bodies(r)} == {"alice", "charlie"}, + }, + # REGEXP — distinct operator (match() function) + { + "name": "prim.string_regexp", + "requestType": "raw", + "expression": "user.name REGEXP '^[a-b].*'", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 2 and {b["user"]["name"] for b in _get_bodies(r)} == {"alice", "bob"}, + }, + # IN — distinct operator (sb.In) + { + "name": "prim.string_in", + "requestType": "raw", + "expression": "user.name IN ['alice', 'diana']", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 2 and {b["user"]["name"] for b in _get_bodies(r)} == {"alice", "diana"}, + }, + # 
BETWEEN — distinct operator + type ambiguity (log5 "28" included) + { + "name": "prim.int_between_with_type_ambiguity", + "requestType": "raw", + "expression": "user.age BETWEEN 25 AND 30", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 3 and {b["user"]["age"] for b in _get_bodies(r)} == {25, 30, "28"}, + }, + # EXISTS on sparse field — only log1 has email + { + "name": "prim.exists_sparse", + "requestType": "raw", + "expression": "user.email EXISTS", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 1 and _get_bodies(r)[0]["user"]["email"] == "alice@test.com", + }, + # Deep non-array nesting (a.b.c) + { + "name": "prim.deeply_nested_equal", + "requestType": "raw", + "expression": "user.address.zip = 110001", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 1 and _get_bodies(r)[0]["user"]["address"]["zip"] == 110001, + }, + # Hyphen in key name — special character path escaping + { + "name": "prim.hyphen_key_equal", + "requestType": "raw", + "expression": "http-status = 200", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 2 and all(b["http-status"] == 200 for b in _get_bodies(r)), + }, + # ── negative operators ───────────────────────────────────────────── + # != uses assumeNotNull wrapping + { + "name": "prim.not_equal", + "requestType": "raw", + "expression": 'user.name != "alice"', + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) >= 3 and all(b.get("user", {}).get("name") != "alice" for b in _get_bodies(r)), + }, + # NOT CONTAINS uses NOT ILIKE — distinct from != + { + "name": "prim.not_contains", + "requestType": "raw", + "expression": 'user.name NOT CONTAINS "ali"', + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) >= 3 and all("ali" not in b.get("user", {}).get("name", "") for b in _get_bodies(r)), + }, + # NOT EXISTS — IS NULL + { + "name": "prim.not_exists_sparse", + "requestType": "raw", + "expression": "user.email NOT 
EXISTS", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) >= 2 and all("email" not in b.get("user", {}) for b in _get_bodies(r)), + }, + # NOT IN — complement of prim.string_in + { + "name": "prim.string_not_in", + "requestType": "raw", + "expression": "user.name NOT IN ['alice', 'charlie']", + "aggregation": "count()", + # log1(alice) and log3(charlie) are excluded + # bob, diana, "" (log6), healthcheck (no user) are included + "validate": lambda r: len(get_rows(r)) >= 2 and all(b.get("user", {}).get("name") not in ("alice", "charlie") for b in _get_bodies(r)), + }, + ] + + for case in cases: + case.setdefault("groupBy", None) + case.setdefault("stepInterval", None) + _run_query_case(signoz, token, now, case) + + +# ============================================================================ +# Indexed path behavior +# +# Indexes: body.user.name (String/ngrambf), body.user.age (Int64/minmax). +# log4 has no user; log6 has zero/empty values (age=0, name=""). +# ============================================================================ + + +def test_indexed_paths( + signoz: types.SigNoz, + create_user_admin: None, # pylint: disable=unused-argument + get_token: Callable[[str, str], str], + insert_logs: Callable[[list[Logs]], None], + export_json_types: Callable[[list[Logs]], None], + create_json_index: Callable[[str, list[dict[str, Any]]], None], + check_query_log: Callable[[datetime, str, Callable[[str], bool]], None], +) -> None: + now = datetime.now(tz=UTC) + + log1 = json.dumps({"user": {"raw-data": {"name": "alice", "age": 25, "active": True}}}) + log2 = json.dumps({"user": {"raw-data": {"name": "bob", "age": 30, "active": False}}}) + log3 = json.dumps({"user": {"raw-data": {"name": "charlie", "age": 35, "active": True}}}) + log4 = json.dumps({"message": "health check passed"}) + log5 = json.dumps({"user": {"raw-data": {"name": "diana", "age": "28", "active": True}}}) + log6 = json.dumps({"user": {"raw-data": {"name": "", "age": 0, 
"active": False}}}) + + logs_list = [ + Logs( + timestamp=now - timedelta(seconds=5), + resources={"service.name": "auth-service"}, + body_v2=log1, + body_promoted="", + severity_text="INFO", + ), + Logs( + timestamp=now - timedelta(seconds=4), + resources={"service.name": "auth-service"}, + body_v2=log2, + body_promoted="", + severity_text="ERROR", + ), + Logs( + timestamp=now - timedelta(seconds=3), + resources={"service.name": "api-gateway"}, + body_v2=log3, + body_promoted="", + severity_text="INFO", + ), + Logs( + timestamp=now - timedelta(seconds=2), + resources={"service.name": "healthcheck"}, + body_v2=log4, + body_promoted="", + severity_text="INFO", + ), + Logs( + timestamp=now - timedelta(seconds=1), + resources={"service.name": "legacy-service"}, + body_v2=log5, + body_promoted="", + severity_text="WARN", + ), + Logs( + timestamp=now - timedelta(milliseconds=500), + resources={"service.name": "zero-service"}, + body_v2=log6, + body_promoted="", + severity_text="DEBUG", + ), + ] + + export_json_types(logs_list) + token = get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD) + create_json_index( + token, + [ + { + "path": "body.user.raw-data.name", + "indexes": [ + { + "fieldDataType": "string", + "type": "ngrambf_v1(3, 256, 2, 0)", + "granularity": 1, + } + ], + }, + { + "path": "body.user.raw-data.age", + "indexes": [ + { + "fieldDataType": "int64", + "type": "minmax", + "granularity": 1, + } + ], + }, + ], + ) + insert_logs(logs_list) + + cases = [ + # ── EXISTS: !isExistsCheck guard → indexed path skipped → plain IS NOT NULL ────── + # String ngrambf index on body.user.name: EXISTS skips the indexed path. + # IS NOT NULL("") = true → log6 (name="") IS found. + # log4 (no user) is the only exclusion → 5 results. + # query_log check: dynamicElement(...) IS NOT NULL — no assumeNotNull. 
+ { + "name": "indexed.string_exists_skips_index_finds_empty", + "requestType": "raw", + "expression": "user.raw-data.name EXISTS", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 5 and any(b.get("user", {}).get("raw-data", {}).get("name") == "" for b in _get_bodies(r)), + "check_query": lambda q: "IS NOT NULL" in q and "assumeNotNull" not in q, + }, + # Int64 minmax index on body.user.age: EXISTS skips the indexed path. + # IS NOT NULL(0) = true → log6 (age=0) IS found. + # log4 (no user) is the only exclusion → 5 results. + # query_log check: dynamicElement(...) IS NOT NULL — no assumeNotNull. + { + "name": "indexed.int64_exists_skips_index_finds_zero", + "requestType": "raw", + "expression": "user.raw-data.age EXISTS", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 5 and any(b.get("user", {}).get("raw-data", {}).get("age") == 0 for b in _get_bodies(r)), + "check_query": lambda q: "IS NOT NULL" in q and "assumeNotNull" not in q, + }, + # body.user.name = "": `assumeNotNull(dynamicElement(..., 'String')) = '' + # AND IS NOT NULL(dynamicElement(..., 'String'))`. + # log6 (name=""): assumeNotNull("")="" matches AND IS NOT NULL("")=true → FOUND. + # log4 (no user): IS NOT NULL(null)=false → NOT found. + # query_log check: both assumeNotNull (indexed condition) and IS NOT NULL + # (zero-value disambiguation) must appear. + { + "name": "indexed.string_empty_eq_disambiguates_absent_field", + "requestType": "raw", + "expression": 'user.raw-data.name = ""', + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 1 and _get_bodies(r)[0]["user"]["raw-data"]["name"] == "", + "check_query": lambda q: "assumeNotNull" in q and "IS NOT NULL" in q, + }, + # ── Non-EXISTS, non-zero value: indexed condition is self-contained ────────────── + # body.user.age = 25: `assumeNotNull(dynamicElement(..., 'Int64')) = 25` → only log1. + # query_log check: assumeNotNull present, no IS NOT NULL (value is non-zero). 
+ { + "name": "indexed.int64_nonzero_eq_uses_index", + "requestType": "raw", + "expression": "user.raw-data.age = 25", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 1 and _get_bodies(r)[0]["user"]["raw-data"]["age"] == 25, + "check_query": lambda q: "assumeNotNull" in q and "IS NOT NULL" not in q, + }, + # body.user.name = "alice": `assumeNotNull(dynamicElement(..., 'String')) = 'alice'` + # → only log1. + # query_log check: assumeNotNull present, no IS NOT NULL (value is non-empty). + { + "name": "indexed.string_nonempty_eq_uses_index", + "requestType": "raw", + "expression": 'user.raw-data.name = "alice"', + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 1 and _get_bodies(r)[0]["user"]["raw-data"]["name"] == "alice", + "check_query": lambda q: "assumeNotNull" in q and "IS NOT NULL" not in q, + }, + ] + + for case in cases: + case.setdefault("groupBy", None) + case.setdefault("stepInterval", None) + before = datetime.now(tz=UTC) + _run_query_case(signoz, token, now, case) + if "check_query" in case: + check_query_log( + before, + case["name"], + case["check_query"], + tables=["signoz_logs.distributed_logs_v2"], + must_contain=["body_v2"], + limit=1, + ) + + +# ============================================================================ +# Select + OrderBy on JSON body paths +# +# 4 logs: unique status (200/201/404/500), unique score (85/72/91/60), +# items[].tags[] gives 2-level array nesting. 
+# ============================================================================ + + +def test_select_order_by( + signoz: types.SigNoz, + create_user_admin: None, # pylint: disable=unused-argument + get_token: Callable[[str, str], str], + insert_logs: Callable[[list[Logs]], None], + export_json_types: Callable[[list[Logs]], None], +) -> None: + now = datetime.now(tz=UTC) + + log1 = json.dumps( + { + "status": 200, + "user": {"name": "alice", "score": 85}, + "items": [{"id": 1, "tags": ["a", "b"]}, {"id": 2, "tags": ["c"]}], + } + ) + log2 = json.dumps( + { + "status": 404, + "user": {"name": "bob", "score": 72}, + "items": [{"id": 3, "tags": ["d"]}], + } + ) + log3 = json.dumps( + { + "status": 500, + "user": {"name": "charlie", "score": 91}, + "items": [{"id": 4, "tags": ["e", "f"]}], + } + ) + log4 = json.dumps( + { + "status": 201, + "user": {"name": "diana", "score": 60}, + "items": [{"id": 5, "tags": ["g"]}], + } + ) + + logs_list = [ + Logs( + timestamp=now - timedelta(seconds=4), + resources={"service.name": "svc-a"}, + body_v2=log1, + body_promoted="", + severity_text="INFO", + ), + Logs( + timestamp=now - timedelta(seconds=3), + resources={"service.name": "svc-b"}, + body_v2=log2, + body_promoted="", + severity_text="WARN", + ), + Logs( + timestamp=now - timedelta(seconds=2), + resources={"service.name": "svc-c"}, + body_v2=log3, + body_promoted="", + severity_text="ERROR", + ), + Logs( + timestamp=now - timedelta(seconds=1), + resources={"service.name": "svc-d"}, + body_v2=log4, + body_promoted="", + severity_text="INFO", + ), + ] + + export_json_types(logs_list) + insert_logs(logs_list) + token = get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD) + + start_ms = int((now - timedelta(seconds=10)).timestamp() * 1000) + end_ms = int(now.timestamp() * 1000) + + def _run(case: dict[str, Any]) -> None: + query = build_scalar_query( + name=case["name"], + signal="logs", + aggregations=[build_logs_aggregation("count()")], + order=case["order"], + 
limit=100, + step_interval=60, + ) + query["spec"]["selectFields"] = case["selectFields"] + response = make_query_request( + signoz=signoz, + token=token, + start_ms=start_ms, + end_ms=end_ms, + queries=[query], + request_type="raw", + ) + assert response.status_code == 200, f"HTTP {response.status_code} for '{case['name']}': {response.text}" + assert case["validate"](response), f"Validation failed for '{case['name']}': {response.json()}" + + # Timestamp-based ordering helper: the 4 logs are inserted at now-4s/3s/2s/1s, + # which map to statuses 200/404/500/201 respectively. + # When ordered by body.status ASC (200→201→404→500) the row timestamps follow + # the pattern: ts[0] < ts[2] < ts[3] < ts[1] (i.e. -4s, -3s, -2s, -1s reordered). + def _ts(r: requests.Response) -> list[int]: + return [row["data"]["timestamp"] for row in get_rows(r)] + + cases = [ + # select array, order by scalar (status not selected — verify via timestamps) + { + "name": "sel_ord.select_items_order_by_status", + "selectFields": [{"name": "items"}], + "order": [build_order_by("status", "asc")], + "validate": lambda r: ( + len(get_rows(r)) == 4 + # items field is present and is a list in every row + and all(isinstance(x, list) for x in get_column_data_from_response(r.json(), "items")) + # status ASC maps to timestamp order: [-4s, -1s, -3s, -2s] + # i.e. 
ts[0] < ts[2] < ts[3] < ts[1] + and _ts(r)[0] < _ts(r)[2] < _ts(r)[3] < _ts(r)[1] + ), + }, + # select array, order by array field (all arrays are [], order is non-deterministic) + { + "name": "sel_ord.select_items_order_by_items", + "selectFields": [{"name": "items"}], + "order": [build_order_by("items", "asc")], + "validate": lambda r: len(get_rows(r)) == 4 and all(isinstance(x, list) for x in get_column_data_from_response(r.json(), "items")), + }, + # select scalar + array, order by scalar — verify exact status ordering + { + "name": "sel_ord.select_status_and_items_order_by_status", + "selectFields": [{"name": "status"}, {"name": "items"}], + "order": [build_order_by("status", "asc")], + "validate": lambda r: get_column_data_from_response(r.json(), "status") == [200, 201, 404, 500] and all(isinstance(x, list) for x in get_column_data_from_response(r.json(), "items")), + }, + # select scalar + array, order by array field (all arrays are [], order is non-deterministic) + { + "name": "sel_ord.select_status_and_items_order_by_items", + "selectFields": [{"name": "status"}, {"name": "items"}], + "order": [build_order_by("items", "desc")], + "validate": lambda r: len(get_rows(r)) == 4 and set(get_column_data_from_response(r.json(), "status")) == {200, 201, 404, 500} and all(isinstance(x, list) for x in get_column_data_from_response(r.json(), "items")), + }, + ] + + for case in cases: + _run(case) + + +# ============================================================================ +# Array path operations +# ============================================================================ + + +def test_array_path_operations( + signoz: types.SigNoz, + create_user_admin: None, # pylint: disable=unused-argument + get_token: Callable[[str, str], str], + insert_logs: Callable[[list[Logs]], None], + export_json_types: Callable[[list[Logs]], None], +) -> None: + now = datetime.now(tz=UTC) + + # log1: university-service — rich, multi-entry education with deep nesting + log1 = 
json.dumps( + { + "education": [ + { + "name": "IIT", + "type": "engineering", + "parameters": [1.65, 2.5, 3.0], + "scores": [90, 85, 95], + "awards": [ + { + "name": "Iron Award", + "type": "sports", + "participated": [ + { + "team": [{"branch": "Civil"}, {"branch": "CS"}], + } + ], + }, + { + "name": "Gold Award", + "type": "academic", + }, + ], + }, + { + "name": "MIT", + "type": 10001, + }, + ], + } + ) + + # log2: enrollment-service — single sparse entry, NO awards, NO type + log2 = json.dumps( + { + "education": [ + { + "name": "Stanford", + "parameters": [1.65, 6.0], + "scores": [95, 88], + } + ], + } + ) + + # log3: research-service — deep nesting, http-events with non-array intermediate + log3 = json.dumps( + { + "education": [ + { + "name": "Harvard", + "type": "research", + "parameters": [7.0, 8.0], + "scores": [60, 65], + "awards": [ + { + "name": "Silver Award", + "type": "research", + "participated": [ + { + "team": [{"branch": "Civil"}, {"branch": "EE"}], + }, + { + "team": [{"branch": "ME"}], + }, + ], + } + ], + } + ], + "http-events": [ + {"request-info": {"host": "example.com"}}, + {"request-info": {"host": "other.com"}}, + ], + } + ) + + # log4: app-service — NO education, deep interests chain + log4 = json.dumps( + { + "interests": [ + { + "entities": [ + { + "reviews": [ + { + "entries": [ + { + "metadata": [ + { + "positions": [ + { + "ratings": [5, 4, 3], + } + ] + } + ] + } + ] + } + ] + } + ] + } + ], + "http-events": [{"request-info": {"host": "test.com"}}], + } + ) + + logs_list = [ + Logs( + timestamp=now - timedelta(seconds=4), + resources={"service.name": "university-service"}, + body_v2=log1, + body_promoted="", + severity_text="INFO", + ), + Logs( + timestamp=now - timedelta(seconds=3), + resources={"service.name": "enrollment-service"}, + body_v2=log2, + body_promoted="", + severity_text="INFO", + ), + Logs( + timestamp=now - timedelta(seconds=2), + resources={"service.name": "research-service"}, + body_v2=log3, + body_promoted="", 
+ severity_text="INFO", + ), + Logs( + timestamp=now - timedelta(seconds=1), + resources={"service.name": "app-service"}, + body_v2=log4, + body_promoted="", + severity_text="INFO", + ), + ] + + export_json_types(logs_list) + insert_logs(logs_list) + token = get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD) + + cases = [ + # ── single-hop: education[].field ────────────────────────────────── + # log1,log2,log3 have education[].name; log4 does not + { + "name": "arr.single_exists", + "requestType": "raw", + "expression": "education[].name EXISTS", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 3 and all(all("name" in e for e in b["education"]) for b in _get_bodies(r)), + }, + { + "name": "arr.single_string_equal", + "requestType": "raw", + "expression": 'education[].name = "IIT"', + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 1 and any(e["name"] == "IIT" for e in _get_bodies(r)[0]["education"]), + }, + # education[].type: "engineering" (string) in log1[0], 10001 (int!) 
in log1[1], + # absent in log2, "research" in log3 — type ambiguity + { + "name": "arr.single_type_ambiguity_string", + "requestType": "raw", + "expression": 'education[].type = "engineering"', + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 1 and any(e.get("type") == "engineering" for e in _get_bodies(r)[0]["education"]), + }, + # Terminal Array(Float64) + Array(Dynamic) dual branch traversal + { + "name": "arr.terminal_float_contains", + "requestType": "raw", + "expression": "education[].parameters CONTAINS 1.65", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 2 and all(any(1.65 in e.get("parameters", []) for e in b["education"]) for b in _get_bodies(r)), + }, + # IN on terminal Array(Int64) + { + "name": "arr.terminal_int_in", + "requestType": "raw", + "expression": "education[].scores IN [90, 95]", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 2 and all(any(set(e.get("scores", [])) & {90, 95} for e in b["education"]) for b in _get_bodies(r)), + }, + # ── single-hop negative ──────────────────────────────────────────── + # != wraps NOT at outer arrayExists level + { + "name": "arr.single_not_equal", + "requestType": "raw", + "expression": 'education[].name != "IIT"', + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 3 and all(not any(e.get("name") == "IIT" for e in b.get("education", [])) for b in _get_bodies(r)), + }, + # NOT EXISTS on array path — log4 has no education at all + { + "name": "arr.single_not_exists", + "requestType": "raw", + "expression": "education[].name NOT EXISTS", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 1 and all("education" not in b for b in _get_bodies(r)), + }, + # NOT CONTAINS 1.65: log1 (has 1.65 → excluded), log2 (has 1.65 → excluded). + # Matches: log3 (params [7.0, 8.0]), log4 (no education — passes NOT CONTAINS). + # Exercises negation on dual-branch terminal (Array(Float64) + Array(Dynamic)). 
+ { + "name": "arr.terminal_not_contains_float", + "requestType": "raw", + "expression": "education[].parameters NOT CONTAINS 1.65", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 2 and all(not any(1.65 in e.get("parameters", []) for e in b.get("education", [])) for b in _get_bodies(r)), + }, + # ── double-hop: education[].awards[].field ───────────────────────── + # Only log1 and log3 have awards; log2 has no awards (sparse) + { + "name": "arr.double_exists", + "requestType": "raw", + "expression": "education[].awards[].name EXISTS", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 2 and all(any(any("name" in a for a in e.get("awards", [])) for e in b["education"]) for b in _get_bodies(r)), + }, + # Array(JSON) + Array(Dynamic) dual branch at awards[] hop + { + "name": "arr.double_string_equal", + "requestType": "raw", + "expression": 'education[].awards[].name = "Iron Award"', + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 1 and any(any(a.get("name") == "Iron Award" for a in e.get("awards", [])) for e in _get_bodies(r)[0]["education"]), + }, + # ── multi-hop: 4+ hops deep (participated[].team[].branch) ──────── + { + "name": "arr.multi_hop_branch_contains", + "requestType": "raw", + "expression": 'education[].awards[].participated[].team[].branch Contains "Civil"', + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 2 and all(any(any(any(any("Civil" in t.get("branch", "") for t in p.get("team", [])) for p in a.get("participated", [])) for a in e.get("awards", [])) for e in b["education"]) for b in _get_bodies(r)), + }, + # ── non-array intermediate: http-events[].request-info.host ──────── + { + "name": "arr.non_array_intermediate_equal", + "requestType": "raw", + "expression": 'http-events[].request-info.host = "example.com"', + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 1 and any(e["request-info"]["host"] == "example.com" for e in 
_get_bodies(r)[0]["http-events"]), + }, + # ── super deep: 6-hop interests[]...ratings ──────────────────────── + { + "name": "arr.super_deep_contains", + "requestType": "raw", + "expression": "interests[].entities[].reviews[].entries[].metadata[].positions[].ratings Contains 4", + "aggregation": "count()", + "validate": lambda r: ( + len(get_rows(r)) == 1 + and any(any(any(any(any(any(4 in pos.get("ratings", []) for pos in meta.get("positions", [])) for meta in entry.get("metadata", [])) for entry in rev.get("entries", [])) for rev in ent.get("reviews", [])) for ent in interest.get("entities", [])) for interest in _get_bodies(r)[0]["interests"]) + ), + }, + ] + + for case in cases: + case.setdefault("groupBy", None) + case.setdefault("stepInterval", None) + _run_query_case(signoz, token, now, case) + + +# ============================================================================ +# Array membership — has, hasAll, hasAny +# ============================================================================ + + +def test_array_membership_operations( + signoz: types.SigNoz, + create_user_admin: None, # pylint: disable=unused-argument + get_token: Callable[[str, str], str], + insert_logs: Callable[[list[Logs]], None], + export_json_types: Callable[[list[Logs]], None], +) -> None: + now = datetime.now(tz=UTC) + + # log1: full structure + log1 = json.dumps( + { + "tags": ["production", "api", "critical"], + "flags": [True, False, True], + "user": {"permissions": ["read", "write", "admin"]}, + "education": [ + { + "awards": [ + { + "participated": [{"members": ["Piyush", "Tushar", "Raj"]}], + } + ], + "parameters": [1.65, 2.5], + } + ], + } + ) + # log2: different values, sparser + log2 = json.dumps( + { + "tags": ["staging", "api"], + "flags": [False], + "user": {"permissions": ["read"]}, + "education": [ + { + "awards": [ + { + "participated": [{"members": ["Ankit", "Tushar"]}], + } + ], + "parameters": [4.0, 5.0], + } + ], + } + ) + # log3: has tags and permissions but no 
flags, no education + log3 = json.dumps( + { + "tags": ["production", "web"], + "user": {"permissions": ["read", "write"]}, + } + ) + # log4: completely unrelated structure + log4 = json.dumps( + { + "message": "cron job finished", + } + ) + + logs_list = [ + Logs( + timestamp=now - timedelta(seconds=4), + resources={"service.name": "api-svc"}, + body_v2=log1, + body_promoted="", + severity_text="INFO", + ), + Logs( + timestamp=now - timedelta(seconds=3), + resources={"service.name": "staging-svc"}, + body_v2=log2, + body_promoted="", + severity_text="INFO", + ), + Logs( + timestamp=now - timedelta(seconds=2), + resources={"service.name": "web-svc"}, + body_v2=log3, + body_promoted="", + severity_text="INFO", + ), + Logs( + timestamp=now - timedelta(seconds=1), + resources={"service.name": "cron-svc"}, + body_v2=log4, + body_promoted="", + severity_text="INFO", + ), + ] + + export_json_types(logs_list) + insert_logs(logs_list) + token = get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD) + + cases = [ + # has(tags, "production"): log1 and log3 (log2 has "staging", log4 has no tags) + { + "name": "membership.has_string", + "requestType": "raw", + "expression": 'has(tags, "production")', + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 2 and all("production" in b["tags"] for b in _get_bodies(r)), + }, + # has(flags, true): only log1 (log2 has [false], log3/log4 have no flags) + { + "name": "membership.has_bool_sparse", + "requestType": "raw", + "expression": "has(flags, true)", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 1 and True in _get_bodies(r)[0]["flags"], + }, + # has() on nested array terminal: parameters contains 1.65 → log1 only + { + "name": "membership.has_nested_float", + "requestType": "raw", + "expression": "has(education[].parameters, 1.65)", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 1 and any(1.65 in e.get("parameters", []) for e in 
_get_bodies(r)[0]["education"]), + }, + { + "name": "membership.hasall_permissions", + "requestType": "raw", + "expression": "hasAll(user.permissions, ['read', 'write'])", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 2 and all({"read", "write"}.issubset(set(b["user"]["permissions"])) for b in _get_bodies(r)), + }, + { + "name": "membership.hasany_deep_members", + "requestType": "raw", + "expression": "hasAny(education[].awards[].participated[].members, ['Piyush', 'Tushar'])", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 2 and all(any(any(any({"Piyush", "Tushar"} & set(p.get("members", [])) for p in a.get("participated", [])) for a in e.get("awards", [])) for e in b["education"]) for b in _get_bodies(r)), + }, + ] + + for case in cases: + case.setdefault("groupBy", None) + case.setdefault("stepInterval", None) + _run_query_case(signoz, token, now, case) + + +# ============================================================================ +# Message / full-text search +# ============================================================================ + + +def test_message_searches( + signoz: types.SigNoz, + create_user_admin: None, # pylint: disable=unused-argument + get_token: Callable[[str, str], str], + insert_logs: Callable[[list[Logs]], None], + export_json_types: Callable[[list[Logs]], None], +) -> None: + now = datetime.now(tz=UTC) + + # Plain-text → normalized to {"message": "Payment processed successfully"} + text_log = Logs( + timestamp=now - timedelta(seconds=4), + resources={"service.name": "payment-service"}, + body="Payment processed successfully", + severity_text="INFO", + ) + # JSON with message + json_log = Logs( + timestamp=now - timedelta(seconds=3), + resources={"service.name": "payment-service"}, + body_v2=json.dumps({"message": "Payment failed with error", "code": 500}), + body_promoted="", + severity_text="ERROR", + ) + # Control: has message but no "Payment" + control_log = Logs( + timestamp=now 
- timedelta(seconds=2), + resources={"service.name": "db-service"}, + body_v2=json.dumps({"message": "Database connection established", "code": 200}), + body_promoted="", + severity_text="INFO", + ) + # No message field at all — just nested data + no_msg_log = Logs( + timestamp=now - timedelta(seconds=1), + resources={"service.name": "metrics-service"}, + body_v2=json.dumps({"metric": "cpu_usage", "value": 78.5, "tags": {"host": "prod-1"}}), + body_promoted="", + severity_text="INFO", + ) + + logs_list = [text_log, json_log, control_log, no_msg_log] + export_json_types(logs_list) + insert_logs(logs_list) + token = get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD) + + def _body_messages(response: requests.Response) -> list[str]: + return [json.loads(row["data"]["body"]).get("message", "") for row in get_rows(response)] + + payment_messages = { + "Payment processed successfully", + "Payment failed with error", + } + + cases = [ + { + "name": "msg.fts_body_contains", + "requestType": "raw", + "expression": 'body CONTAINS "Payment"', + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 2 and set(_body_messages(r)) == payment_messages, + }, + { + "name": "msg.body_message_contains", + "requestType": "raw", + "expression": 'message CONTAINS "Payment"', + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 2 and set(_body_messages(r)) == payment_messages, + }, + { + "name": "msg.message_key_contains", + "requestType": "raw", + "expression": 'message CONTAINS "Payment"', + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 2 and set(_body_messages(r)) == payment_messages, + }, + # FTS — bare keyword + { + "name": "msg.fts_quoted", + "requestType": "raw", + "expression": '"Payment"', + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 2 and all("Payment" in b.get("message", "") for b in _get_bodies(r)), + }, + # = operator via body.message — tests exact match path + { + "name": 
"msg.body_message_exact", + "requestType": "raw", + "expression": 'message = "Payment processed successfully"', + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 1 and _body_messages(r)[0] == "Payment processed successfully", + }, + # message EXISTS: text_log, json_log, control_log have message; no_msg_log doesn't + { + "name": "msg.message_exists", + "requestType": "raw", + "expression": "message EXISTS", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 3 and all("message" in b for b in _get_bodies(r)), + }, + # ── negative ────────────────────────────────────────────────────── + # NOT CONTAINS "Payment": control_log and no_msg_log + { + "name": "msg.fts_body_not_contains", + "requestType": "raw", + "expression": 'body NOT CONTAINS "Payment"', + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) >= 1 and all("Payment" not in msg for msg in _body_messages(r)), + }, + ] + + for case in cases: + case.setdefault("groupBy", None) + case.setdefault("stepInterval", None) + _run_query_case(signoz, token, now, case) + + +# ============================================================================ +# Polluted Data — body-path isolation verified against attribute key pollution +# ============================================================================ + + +def test_polluted_data( + signoz: types.SigNoz, + create_user_admin: None, # pylint: disable=unused-argument + get_token: Callable[[str, str], str], + insert_logs: Callable[[list[Logs]], None], + export_json_types: Callable[[list[Logs]], None], +) -> None: + now = datetime.now(tz=UTC) + + # Clean baseline — no attribute pollution + log_clean = json.dumps({"user": {"name": "alice"}}) + # Collision: attribute key is the full dotted path "body.user.name" + log_body_attr_clash = json.dumps({"user": {"name": "shadow"}}) + # Ghost: body has NO user.name; only the attribute key "body.user.name" exists + log_ghost = json.dumps({"status": 200}) + # Flat attr: 
attribute key is "user.name" (without body. prefix) — no collision expected + log_flat_attr = json.dumps({"user": {"name": "charlie"}}) + + logs_list = [ + Logs( + timestamp=now - timedelta(seconds=4), + resources={"service.name": "clean-svc"}, + body_v2=log_clean, + body_promoted="", + severity_text="INFO", + ), + Logs( + timestamp=now - timedelta(seconds=3), + resources={"service.name": "polluted-svc"}, + attributes={"body.user.name": "impostor"}, + body_v2=log_body_attr_clash, + body_promoted="", + severity_text="INFO", + ), + Logs( + timestamp=now - timedelta(seconds=2), + resources={"service.name": "ghost-svc"}, + attributes={"body.user.name": "ghost"}, + body_v2=log_ghost, + body_promoted="", + severity_text="WARN", + ), + Logs( + timestamp=now - timedelta(seconds=1), + resources={"service.name": "flat-attr-svc"}, + attributes={"user.name": "shadow"}, + body_v2=log_flat_attr, + body_promoted="", + severity_text="INFO", + ), + ] + + export_json_types(logs_list) + insert_logs(logs_list) + token = get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD) + + cases = [ + # ── body-path vs attribute isolation ───────────────────────────────── + # user.name IS ambiguous (flat-attr-svc has attribute key "user.name" and + # body.user.name="charlie") so the QB emits a warning and ORs both contexts. + # However, polluted-svc stores the value under the DIFFERENT key + # "body.user.name" (not "user.name"), so: + # body search → no match ("impostor" not in any body) + # attr search → no match ("impostor" under "body.user.name", not "user.name") + # → 0 rows despite the ambiguity warning. Proves that the "body." prefix + # in an attribute key does NOT merge into the body-path lookup. 
+ { + "name": "polluted.body_prefix_isolated_from_literal_attr_value", + "requestType": "raw", + "expression": 'user.name = "impostor"', + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 0 and r.json().get("data", {}).get("warning") is not None, + }, + # ghost-svc stores the value under attribute key "body.user.name", not + # "user.name", and the body has no user object. Same reasoning → 0 rows. + { + "name": "polluted.ghost_log_not_returned_for_body_query", + "requestType": "raw", + "expression": 'user.name = "ghost"', + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 0, + }, + # Body queries still find real body values when the value exists only in the + # body (alice only lives in body, not in any attribute) → 1 row. + { + "name": "polluted.clean_body_query_unaffected", + "requestType": "raw", + "expression": 'user.name = "alice"', + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 1 and _get_bodies(r)[0]["user"]["name"] == "alice", + }, + # EXISTS with an ambiguous key uses OR across contexts: + # body-EXISTS: log_clean(alice), log_body_attr_clash(shadow), log_flat_attr(charlie) → 3 + # attr-EXISTS ("user.name" only): log_flat_attr(shadow) → 1 (already in body set) + # log_ghost is NOT included — its attr key is "body.user.name", not "user.name". + # Union: 3 unique logs (alice, shadow, charlie). + { + "name": "polluted.exists_scoped_to_body_paths_only", + "requestType": "raw", + "expression": "user.name EXISTS", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 3 and {b.get("user", {}).get("name") for b in _get_bodies(r)} == {"alice", "shadow", "charlie"}, + }, + # ── new: OR match across body AND attribute in the same query ────────── + # "shadow" exists in body (log_body_attr_clash: body.user.name="shadow") AND + # in attribute (log_flat_attr: attributes_string["user.name"]="shadow"). + # The ambiguous-OR returns both logs → 2 rows. 
+ { + "name": "polluted.ambiguous_key_or_finds_both_body_and_attr_match", + "requestType": "raw", + "expression": 'user.name = "shadow"', + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 2 and r.json().get("data", {}).get("warning") is not None and {row["data"]["resources_string"].get("service.name") for row in get_rows(r)} == {"polluted-svc", "flat-attr-svc"}, + }, + ] + + for case in cases: + case.setdefault("groupBy", None) + case.setdefault("stepInterval", None) + _run_query_case(signoz, token, now, case) + + +# ============================================================================ +# GroupBy — scalar aggregation +# ============================================================================ +# +# Uses requestType="scalar" (flat table: [group_key..., count]) instead of +# time_series, because the test goal is to verify body JSON path resolution +# in GROUP BY SQL — not time-bucketing semantics. Scalar results are a +# plain list of rows, far simpler to assert than navigating +# aggregations[0]["series"][*]["labels"]. 
+# +# Data landscape (7 logs, mixed shapes): +# 3 × alice (age=25) — count > 1 exercises the aggregation correctly +# 1 × bob (age=30) +# 1 × charlie (age=35) +# 1 × diana (no age — sparse field) +# 1 × health check (no user at all) +# ============================================================================ + + +def test_groupby_scalar( + signoz: types.SigNoz, + create_user_admin: None, # pylint: disable=unused-argument + get_token: Callable[[str, str], str], + insert_logs: Callable[[list[Logs]], None], + export_json_types: Callable[[list[Logs]], None], +) -> None: + now = datetime.now(tz=UTC) + + logs_data = [ + {"user": {"name": "alice", "age": 25}, "status": 200}, + {"user": {"name": "bob", "age": 30}, "status": 200}, + {"user": {"name": "alice", "age": 25}, "status": 201}, + {"user": {"name": "charlie", "age": 35}, "status": 200}, + {"user": {"name": "alice", "age": 25}, "status": 200}, + {"user": {"name": "diana"}, "status": 500}, + {"message": "health check", "status": 200}, + ] + + logs_list = [ + Logs( + timestamp=now - timedelta(seconds=len(logs_data) - i), + resources={"service.name": "api-service"}, + attributes={}, + body_v2=json.dumps(log_data), + body_promoted="", + severity_text="INFO", + ) + for i, log_data in enumerate(logs_data) + ] + + export_json_types(logs_list) + insert_logs(logs_list) + token = get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD) + + cases = [ + # Scalar GroupBy: results[0]["data"] = [[group_key, count], ...] + # Simpler to validate than time_series which nests inside aggregations[0]["series"]. + { + "name": "groupby.age", + "requestType": "scalar", + "expression": None, + "groupBy": [{"name": "user.age"}], + "limit": 100, + "aggregation": "count()", + # Each row: [age_value, count]. alice×3→count=3, bob×1→count=1, charlie×1→count=1. 
+ "validate": lambda r: len(rows := {str(row[0]): row[-1] for row in get_scalar_table_data(r.json()) if row}) >= 3 and rows.get("25") == 3 and rows.get("30") == 1 and rows.get("35") == 1, + }, + # Multi-field GroupBy — distinct SQL (multiple group-by columns) + { + "name": "groupby.multi", + "requestType": "scalar", + "expression": None, + "groupBy": [ + {"name": "user.name"}, + {"name": "user.age"}, + ], + "limit": 100, + "aggregation": "count()", + # Each row: [name, age, count]. Verify (alice,25)→3, (bob,30)→1, (charlie,35)→1. + "validate": lambda r: len(pairs := {(str(row[0]), str(row[1])): row[-1] for row in get_scalar_table_data(r.json()) if len(row) >= 3}) >= 3 and pairs.get(("alice", "25")) == 3 and pairs.get(("bob", "30")) == 1 and pairs.get(("charlie", "35")) == 1, + }, + ] + + for case in cases: + _run_query_case(signoz, token, now, case) diff --git a/tests/integration/tests/querier_json_body/__init__.py b/tests/integration/tests/querier_json_body/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/integration/tests/querier_json_body/conftest.py b/tests/integration/tests/querier_json_body/conftest.py new file mode 100644 index 0000000000..b080f0051b --- /dev/null +++ b/tests/integration/tests/querier_json_body/conftest.py @@ -0,0 +1,67 @@ +import pytest +from testcontainers.core.container import Network + +from fixtures import types +from fixtures.migrator import create_migrator +from fixtures.signoz import create_signoz + +UNSUPPORTED_CLICKHOUSE_VERSIONS = {"25.5.6"} + + +def pytest_collection_modifyitems(config: pytest.Config, items: list[pytest.Item]) -> None: + version = config.getoption("--clickhouse-version") + if version in UNSUPPORTED_CLICKHOUSE_VERSIONS: + skip = pytest.mark.skip(reason=f"JSON body QB tests require ClickHouse > {version}") + for item in items: + item.add_marker(skip) + + +@pytest.fixture(name="migrator", scope="package") +def migrator_json( + network: Network, + clickhouse: 
types.TestContainerClickhouse, + request: pytest.FixtureRequest, + pytestconfig: pytest.Config, +) -> types.Operation: + """ + Package-scoped migrator with ENABLE_LOGS_MIGRATIONS_V2=1. + """ + return create_migrator( + network=network, + clickhouse=clickhouse, + request=request, + pytestconfig=pytestconfig, + cache_key="migrator-json-body", + env_overrides={ + "ENABLE_LOGS_MIGRATIONS_V2": "1", + }, + ) + + +@pytest.fixture(name="signoz", scope="package") +def signoz_json_body( + network: Network, + migrator: types.Operation, # pylint: disable=unused-argument + zeus: types.TestContainerDocker, + gateway: types.TestContainerDocker, + sqlstore: types.TestContainerSQL, + clickhouse: types.TestContainerClickhouse, + request: pytest.FixtureRequest, + pytestconfig: pytest.Config, +) -> types.SigNoz: + """ + Package-scoped fixture for SigNoz with BODY_JSON_QUERY_ENABLED=true. + """ + return create_signoz( + network=network, + zeus=zeus, + gateway=gateway, + sqlstore=sqlstore, + clickhouse=clickhouse, + request=request, + pytestconfig=pytestconfig, + cache_key="signoz-json-body", + env_overrides={ + "BODY_JSON_QUERY_ENABLED": "true", + }, + ) From 3f95d35ad5510d376dc9831e0dcdd8afd450a327 Mon Sep 17 00:00:00 2001 From: Pandey Date: Tue, 28 Apr 2026 15:31:39 +0530 Subject: [PATCH 06/19] chore: add @therealpandey with @srikanthcvv (#11126) #### Temporary - add @therealpandey with @srikanthcvv --- .github/CODEOWNERS | 56 +++++++++++++++++++++++----------------------- 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index db8b89c3fd..dd0a560878 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -52,49 +52,49 @@ go.mod @therealpandey # Querier Owners -/pkg/querier/ @srikanthccv -/pkg/variables/ @srikanthccv -/pkg/types/querybuildertypes/ @srikanthccv -/pkg/types/telemetrytypes/ @srikanthccv -/pkg/querybuilder/ @srikanthccv -/pkg/telemetrylogs/ @srikanthccv -/pkg/telemetrymetadata/ @srikanthccv -/pkg/telemetrymetrics/ 
@srikanthccv -/pkg/telemetrytraces/ @srikanthccv +/pkg/querier/ @srikanthccv @therealpandey +/pkg/variables/ @srikanthccv @therealpandey +/pkg/types/querybuildertypes/ @srikanthccv @therealpandey +/pkg/types/telemetrytypes/ @srikanthccv @therealpandey +/pkg/querybuilder/ @srikanthccv @therealpandey +/pkg/telemetrylogs/ @srikanthccv @therealpandey +/pkg/telemetrymetadata/ @srikanthccv @therealpandey +/pkg/telemetrymetrics/ @srikanthccv @therealpandey +/pkg/telemetrytraces/ @srikanthccv @therealpandey # Metrics -/pkg/types/metrictypes/ @srikanthccv -/pkg/types/metricsexplorertypes/ @srikanthccv -/pkg/modules/metricsexplorer/ @srikanthccv -/pkg/prometheus/ @srikanthccv +/pkg/types/metrictypes/ @srikanthccv @therealpandey +/pkg/types/metricsexplorertypes/ @srikanthccv @therealpandey +/pkg/modules/metricsexplorer/ @srikanthccv @therealpandey +/pkg/prometheus/ @srikanthccv @therealpandey # APM -/pkg/types/servicetypes/ @srikanthccv -/pkg/types/apdextypes/ @srikanthccv -/pkg/modules/apdex/ @srikanthccv -/pkg/modules/services/ @srikanthccv +/pkg/types/servicetypes/ @srikanthccv @therealpandey +/pkg/types/apdextypes/ @srikanthccv @therealpandey +/pkg/modules/apdex/ @srikanthccv @therealpandey +/pkg/modules/services/ @srikanthccv @therealpandey # Dashboard -/pkg/types/dashboardtypes/ @srikanthccv -/pkg/modules/dashboard/ @srikanthccv +/pkg/types/dashboardtypes/ @srikanthccv @therealpandey +/pkg/modules/dashboard/ @srikanthccv @therealpandey # Rule/Alertmanager -/pkg/types/ruletypes/ @srikanthccv -/pkg/types/alertmanagertypes @srikanthccv -/pkg/alertmanager/ @srikanthccv -/pkg/ruler/ @srikanthccv -/pkg/modules/rulestatehistory/ @srikanthccv -/pkg/types/rulestatehistorytypes/ @srikanthccv +/pkg/types/ruletypes/ @srikanthccv @therealpandey +/pkg/types/alertmanagertypes @srikanthccv @therealpandey +/pkg/alertmanager/ @srikanthccv @therealpandey +/pkg/ruler/ @srikanthccv @therealpandey +/pkg/modules/rulestatehistory/ @srikanthccv @therealpandey +/pkg/types/rulestatehistorytypes/ 
@srikanthccv @therealpandey # Correlation-adjacent -/pkg/contextlinks/ @srikanthccv -/pkg/types/parsertypes/ @srikanthccv -/pkg/queryparser/ @srikanthccv +/pkg/contextlinks/ @srikanthccv @therealpandey +/pkg/types/parsertypes/ @srikanthccv @therealpandey +/pkg/queryparser/ @srikanthccv @therealpandey # AuthN / AuthZ Owners From ffd493617f08d9fb2cc8fa870b506d0da4092a76 Mon Sep 17 00:00:00 2001 From: Nikhil Mantri Date: Tue, 28 Apr 2026 15:49:26 +0530 Subject: [PATCH 07/19] feat(infra-monitoring): v2 pods list API (#10833) * chore: baseline setup * chore: endpoint detail update * chore: added logic for hosts v3 api * fix: bug fix * chore: disk usage * chore: added validate function * chore: added some unit tests * chore: return status as a string * chore: yarn generate api * chore: removed isSendingK8sAgentsMetricsCode * chore: moved funcs * chore: added validation on order by * chore: added pods list logic * chore: updated openapi yml * chore: updated spec * chore: pods api meta start time * chore: nil pointer check * chore: nil pointer dereference fix in req.Filter * chore: added temporalities of metrics * chore: added pods metrics temporality * chore: unified composite key function * chore: code improvements * chore: added pods list api updates * chore: hostStatusNone added for clarity that this field can be left empty as well in payload * chore: yarn generate api * chore: return errors from getMetadata and lint fix * chore: return errors from getMetadata and lint fix * chore: added hostName logic * chore: modified getMetadata query * chore: add type for response and files rearrange * chore: warnings added passing from queryResponse warning to host lists response struct * chore: added better metrics existence check * chore: added a TODO remark * chore: added required metrics check * chore: distributed samples table to local table change for get metadata * chore: frontend fix * chore: endpoint correction * chore: endpoint modification openapi * chore: escape 
backtick to prevent sql injection * chore: rearrage * chore: improvements * chore: validate order by to validate function * chore: improved description * chore: added TODOs and made filterByStatus a part of filter struct * chore: ignore empty string hosts in get active hosts * feat(infra-monitoring): v2 hosts list - return counts of active & inactive hosts for custom group by attributes (#10956) * chore: add functionality for showing active and inactive counts in custom group by * chore: bug fix * chore: added subquery for active and total count * chore: ignore empty string hosts in get active hosts * fix: sinceUnixMilli for determining active hosts compute once per request * chore: refactor code * chore: rename HostsList -> ListHosts * chore: rearrangement * chore: inframonitoring types renaming * chore: added types package * chore: file structure further breakdown for clarity * chore: comments correction * chore: removed temporalities * chore: pods code restructuring * chore: comments resolve * chore: added json tag required: true * chore: removed pod metric temporalities * chore: removed internal server error * chore: added status unauthorized * chore: remove a defensive nil map check, the function ensure non-nil map when err nil * chore: cleanup and rename * chore: make sort stable in case of tiebreaker by comparing composite group by keys * chore: regen api client for inframonitoring Co-Authored-By: Claude Opus 4.7 (1M context) * chore: added required tags * chore: added support for pod phase unknown * chore: removed pods - order by phase * chore: improved api description to document -1 as no data in numeric fields * fix: rebase fixes * feat(infra-monitoring): v2 pods list apis - phase counts when custom grouping (#11088) * chore: added phase counts feature * chore: added queries for pod phase counts in custom group by * chore: added unknown phase count * fix: isPodUIDInGroupBy in buildPodRecords * chore: 3 cte --> 2 cte * chore: pod phase with local table of 
time series as counts * chore: comment correction * chore: corrected comment * chore: value column for samples table added * chore: removed query G for phase counts * chore: rename variable * chore: added PodPhaseNum constants to types * chore: updated comment * chore: formatted file --------- Co-authored-by: Claude Opus 4.7 (1M context) Co-authored-by: Ashwin Bhatkal --- docs/api/openapi.yml | 194 +++++++++- .../services/inframonitoring/index.ts | 88 ++++- .../api/generated/services/sigNoz.schemas.ts | 138 ++++++++ .../FormAlertRules/ChartPreview/index.tsx | 4 +- .../src/container/FormAlertRules/index.tsx | 2 +- .../MetricsExplorer/Explorer/Explorer.tsx | 4 +- .../MetricsExplorer/Inspect/Inspect.tsx | 2 +- .../Inspect/useInspectMetrics.ts | 6 +- .../hooks/queryBuilder/useGetQueryRange.ts | 6 +- frontend/src/pages/LogsExplorer/index.tsx | 2 +- frontend/src/pages/TracesExplorer/index.tsx | 2 +- .../signozapiserver/inframonitoring.go | 21 +- .../implinframonitoring/handler.go | 24 ++ .../implinframonitoring/internaltypes.go | 9 + .../implinframonitoring/module.go | 83 +++++ .../implinframonitoring/pods.go | 334 ++++++++++++++++++ .../implinframonitoring/pods_constants.go | 178 ++++++++++ .../inframonitoring/inframonitoring.go | 2 + pkg/telemetrymetrics/tables.go | 11 + pkg/types/inframonitoringtypes/hosts.go | 2 +- pkg/types/inframonitoringtypes/pods.go | 109 ++++++ .../inframonitoringtypes/pods_constants.go | 55 +++ pkg/types/inframonitoringtypes/pods_test.go | 219 ++++++++++++ 23 files changed, 1478 insertions(+), 17 deletions(-) create mode 100644 pkg/modules/inframonitoring/implinframonitoring/pods.go create mode 100644 pkg/modules/inframonitoring/implinframonitoring/pods_constants.go create mode 100644 pkg/types/inframonitoringtypes/pods.go create mode 100644 pkg/types/inframonitoringtypes/pods_constants.go create mode 100644 pkg/types/inframonitoringtypes/pods_test.go diff --git a/docs/api/openapi.yml b/docs/api/openapi.yml index 669dabd45c..9c83c968bd 100644 
--- a/docs/api/openapi.yml +++ b/docs/api/openapi.yml @@ -2474,6 +2474,97 @@ components: - requiredMetricsCheck - endTimeBeforeRetention type: object + InframonitoringtypesPodPhase: + enum: + - pending + - running + - succeeded + - failed + - unknown + - "" + type: string + InframonitoringtypesPodRecord: + properties: + failedPodCount: + type: integer + meta: + additionalProperties: {} + nullable: true + type: object + pendingPodCount: + type: integer + podAge: + format: int64 + type: integer + podCPU: + format: double + type: number + podCPULimit: + format: double + type: number + podCPURequest: + format: double + type: number + podMemory: + format: double + type: number + podMemoryLimit: + format: double + type: number + podMemoryRequest: + format: double + type: number + podPhase: + $ref: '#/components/schemas/InframonitoringtypesPodPhase' + podUID: + type: string + runningPodCount: + type: integer + succeededPodCount: + type: integer + unknownPodCount: + type: integer + required: + - podUID + - podCPU + - podCPURequest + - podCPULimit + - podMemory + - podMemoryRequest + - podMemoryLimit + - podPhase + - pendingPodCount + - runningPodCount + - succeededPodCount + - failedPodCount + - unknownPodCount + - podAge + - meta + type: object + InframonitoringtypesPods: + properties: + endTimeBeforeRetention: + type: boolean + records: + items: + $ref: '#/components/schemas/InframonitoringtypesPodRecord' + nullable: true + type: array + requiredMetricsCheck: + $ref: '#/components/schemas/InframonitoringtypesRequiredMetricsCheck' + total: + type: integer + type: + $ref: '#/components/schemas/InframonitoringtypesResponseType' + warning: + $ref: '#/components/schemas/Querybuildertypesv5QueryWarnData' + required: + - type + - records + - total + - requiredMetricsCheck + - endTimeBeforeRetention + type: object InframonitoringtypesPostableHosts: properties: end: @@ -2500,6 +2591,32 @@ components: - end - limit type: object + InframonitoringtypesPostablePods: + properties: + 
end: + format: int64 + type: integer + filter: + $ref: '#/components/schemas/Querybuildertypesv5Filter' + groupBy: + items: + $ref: '#/components/schemas/Querybuildertypesv5GroupByKey' + nullable: true + type: array + limit: + type: integer + offset: + type: integer + orderBy: + $ref: '#/components/schemas/Querybuildertypesv5OrderBy' + start: + format: int64 + type: integer + required: + - start + - end + - limit + type: object InframonitoringtypesRequiredMetricsCheck: properties: missingMetrics: @@ -10886,7 +11003,9 @@ paths: five metrics, and pagination via offset/limit. The response type is ''list'' for the default host.name grouping or ''grouped_list'' for custom groupBy keys. Also reports missing required metrics and whether the requested time - range falls before the data retention boundary.' + range falls before the data retention boundary. Numeric metric fields (cpu, + memory, wait, load15, diskUsage) return -1 as a sentinel when no data is available + for that field.' operationId: ListHosts requestBody: content: @@ -10940,6 +11059,79 @@ paths: summary: List Hosts for Infra Monitoring tags: - inframonitoring + /api/v2/infra_monitoring/pods: + post: + deprecated: false + description: 'Returns a paginated list of Kubernetes pods with key metrics: + CPU usage, CPU request/limit utilization, memory working set, memory request/limit + utilization, current pod phase (pending/running/succeeded/failed/unknown), + and pod age (ms since start time). Each pod includes metadata attributes (namespace, + node, workload owner such as deployment/statefulset/daemonset/job/cronjob, + cluster). Supports filtering via a filter expression, custom groupBy to aggregate + pods by any attribute, ordering by any of the six metrics (cpu, cpu_request, + cpu_limit, memory, memory_request, memory_limit), and pagination via offset/limit. 
+ The response type is ''list'' for the default k8s.pod.uid grouping (each row + is one pod with its current phase) or ''grouped_list'' for custom groupBy + keys (each row aggregates pods in the group with per-phase counts: pendingPodCount, + runningPodCount, succeededPodCount, failedPodCount, unknownPodCount derived + from each pod''s latest phase in the window). Also reports missing required + metrics and whether the requested time range falls before the data retention + boundary. Numeric metric fields (podCPU, podCPURequest, podCPULimit, podMemory, + podMemoryRequest, podMemoryLimit, podAge) return -1 as a sentinel when no + data is available for that field.' + operationId: ListPods + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/InframonitoringtypesPostablePods' + responses: + "200": + content: + application/json: + schema: + properties: + data: + $ref: '#/components/schemas/InframonitoringtypesPods' + status: + type: string + required: + - status + - data + type: object + description: OK + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/RenderErrorResponse' + description: Bad Request + "401": + content: + application/json: + schema: + $ref: '#/components/schemas/RenderErrorResponse' + description: Unauthorized + "403": + content: + application/json: + schema: + $ref: '#/components/schemas/RenderErrorResponse' + description: Forbidden + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/RenderErrorResponse' + description: Internal Server Error + security: + - api_key: + - VIEWER + - tokenizer: + - VIEWER + summary: List Pods for Infra Monitoring + tags: + - inframonitoring /api/v2/livez: get: deprecated: false diff --git a/frontend/src/api/generated/services/inframonitoring/index.ts b/frontend/src/api/generated/services/inframonitoring/index.ts index 33b6c6f2f5..072d5a38b9 100644 --- a/frontend/src/api/generated/services/inframonitoring/index.ts +++ 
b/frontend/src/api/generated/services/inframonitoring/index.ts @@ -13,7 +13,9 @@ import type { import type { InframonitoringtypesPostableHostsDTO, + InframonitoringtypesPostablePodsDTO, ListHosts200, + ListPods200, RenderErrorResponseDTO, } from '../sigNoz.schemas'; @@ -21,7 +23,7 @@ import { GeneratedAPIInstance } from '../../../generatedAPIInstance'; import type { ErrorType, BodyType } from '../../../generatedAPIInstance'; /** - * Returns a paginated list of hosts with key infrastructure metrics: CPU usage (%), memory usage (%), I/O wait (%), disk usage (%), and 15-minute load average. Each host includes its current status (active/inactive based on metrics reported in the last 10 minutes) and metadata attributes (e.g., os.type). Supports filtering via a filter expression, filtering by host status, custom groupBy to aggregate hosts by any attribute, ordering by any of the five metrics, and pagination via offset/limit. The response type is 'list' for the default host.name grouping or 'grouped_list' for custom groupBy keys. Also reports missing required metrics and whether the requested time range falls before the data retention boundary. + * Returns a paginated list of hosts with key infrastructure metrics: CPU usage (%), memory usage (%), I/O wait (%), disk usage (%), and 15-minute load average. Each host includes its current status (active/inactive based on metrics reported in the last 10 minutes) and metadata attributes (e.g., os.type). Supports filtering via a filter expression, filtering by host status, custom groupBy to aggregate hosts by any attribute, ordering by any of the five metrics, and pagination via offset/limit. The response type is 'list' for the default host.name grouping or 'grouped_list' for custom groupBy keys. Also reports missing required metrics and whether the requested time range falls before the data retention boundary. 
Numeric metric fields (cpu, memory, wait, load15, diskUsage) return -1 as a sentinel when no data is available for that field. * @summary List Hosts for Infra Monitoring */ export const listHosts = ( @@ -104,3 +106,87 @@ export const useListHosts = < return useMutation(mutationOptions); }; +/** + * Returns a paginated list of Kubernetes pods with key metrics: CPU usage, CPU request/limit utilization, memory working set, memory request/limit utilization, current pod phase (pending/running/succeeded/failed/unknown), and pod age (ms since start time). Each pod includes metadata attributes (namespace, node, workload owner such as deployment/statefulset/daemonset/job/cronjob, cluster). Supports filtering via a filter expression, custom groupBy to aggregate pods by any attribute, ordering by any of the six metrics (cpu, cpu_request, cpu_limit, memory, memory_request, memory_limit), and pagination via offset/limit. The response type is 'list' for the default k8s.pod.uid grouping (each row is one pod with its current phase) or 'grouped_list' for custom groupBy keys (each row aggregates pods in the group with per-phase counts: pendingPodCount, runningPodCount, succeededPodCount, failedPodCount, unknownPodCount derived from each pod's latest phase in the window). Also reports missing required metrics and whether the requested time range falls before the data retention boundary. Numeric metric fields (podCPU, podCPURequest, podCPULimit, podMemory, podMemoryRequest, podMemoryLimit, podAge) return -1 as a sentinel when no data is available for that field. 
+ * @summary List Pods for Infra Monitoring + */ +export const listPods = ( + inframonitoringtypesPostablePodsDTO: BodyType, + signal?: AbortSignal, +) => { + return GeneratedAPIInstance({ + url: `/api/v2/infra_monitoring/pods`, + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + data: inframonitoringtypesPostablePodsDTO, + signal, + }); +}; + +export const getListPodsMutationOptions = < + TError = ErrorType, + TContext = unknown, +>(options?: { + mutation?: UseMutationOptions< + Awaited>, + TError, + { data: BodyType }, + TContext + >; +}): UseMutationOptions< + Awaited>, + TError, + { data: BodyType }, + TContext +> => { + const mutationKey = ['listPods']; + const { mutation: mutationOptions } = options + ? options.mutation && + 'mutationKey' in options.mutation && + options.mutation.mutationKey + ? options + : { ...options, mutation: { ...options.mutation, mutationKey } } + : { mutation: { mutationKey } }; + + const mutationFn: MutationFunction< + Awaited>, + { data: BodyType } + > = (props) => { + const { data } = props ?? 
{}; + + return listPods(data); + }; + + return { mutationFn, ...mutationOptions }; +}; + +export type ListPodsMutationResult = NonNullable< + Awaited> +>; +export type ListPodsMutationBody = + BodyType; +export type ListPodsMutationError = ErrorType; + +/** + * @summary List Pods for Infra Monitoring + */ +export const useListPods = < + TError = ErrorType, + TContext = unknown, +>(options?: { + mutation?: UseMutationOptions< + Awaited>, + TError, + { data: BodyType }, + TContext + >; +}): UseMutationResult< + Awaited>, + TError, + { data: BodyType }, + TContext +> => { + const mutationOptions = getListPodsMutationOptions(options); + + return useMutation(mutationOptions); +}; diff --git a/frontend/src/api/generated/services/sigNoz.schemas.ts b/frontend/src/api/generated/services/sigNoz.schemas.ts index 7eca4e4de9..79b90c59a0 100644 --- a/frontend/src/api/generated/services/sigNoz.schemas.ts +++ b/frontend/src/api/generated/services/sigNoz.schemas.ts @@ -3243,6 +3243,108 @@ export interface InframonitoringtypesHostsDTO { warning?: Querybuildertypesv5QueryWarnDataDTO; } +export enum InframonitoringtypesPodPhaseDTO { + pending = 'pending', + running = 'running', + succeeded = 'succeeded', + failed = 'failed', + unknown = 'unknown', + '' = '', +} +/** + * @nullable + */ +export type InframonitoringtypesPodRecordDTOMeta = { + [key: string]: unknown; +} | null; + +export interface InframonitoringtypesPodRecordDTO { + /** + * @type integer + */ + failedPodCount: number; + /** + * @type object + * @nullable true + */ + meta: InframonitoringtypesPodRecordDTOMeta; + /** + * @type integer + */ + pendingPodCount: number; + /** + * @type integer + * @format int64 + */ + podAge: number; + /** + * @type number + * @format double + */ + podCPU: number; + /** + * @type number + * @format double + */ + podCPULimit: number; + /** + * @type number + * @format double + */ + podCPURequest: number; + /** + * @type number + * @format double + */ + podMemory: number; + /** + * @type number 
+ * @format double + */ + podMemoryLimit: number; + /** + * @type number + * @format double + */ + podMemoryRequest: number; + podPhase: InframonitoringtypesPodPhaseDTO; + /** + * @type string + */ + podUID: string; + /** + * @type integer + */ + runningPodCount: number; + /** + * @type integer + */ + succeededPodCount: number; + /** + * @type integer + */ + unknownPodCount: number; +} + +export interface InframonitoringtypesPodsDTO { + /** + * @type boolean + */ + endTimeBeforeRetention: boolean; + /** + * @type array + * @nullable true + */ + records: InframonitoringtypesPodRecordDTO[] | null; + requiredMetricsCheck: InframonitoringtypesRequiredMetricsCheckDTO; + /** + * @type integer + */ + total: number; + type: InframonitoringtypesResponseTypeDTO; + warning?: Querybuildertypesv5QueryWarnDataDTO; +} + export interface InframonitoringtypesPostableHostsDTO { /** * @type integer @@ -3271,6 +3373,34 @@ export interface InframonitoringtypesPostableHostsDTO { start: number; } +export interface InframonitoringtypesPostablePodsDTO { + /** + * @type integer + * @format int64 + */ + end: number; + filter?: Querybuildertypesv5FilterDTO; + /** + * @type array + * @nullable true + */ + groupBy?: Querybuildertypesv5GroupByKeyDTO[] | null; + /** + * @type integer + */ + limit: number; + /** + * @type integer + */ + offset?: number; + orderBy?: Querybuildertypesv5OrderByDTO; + /** + * @type integer + * @format int64 + */ + start: number; +} + export interface InframonitoringtypesRequiredMetricsCheckDTO { /** * @type array @@ -7350,6 +7480,14 @@ export type ListHosts200 = { status: string; }; +export type ListPods200 = { + data: InframonitoringtypesPodsDTO; + /** + * @type string + */ + status: string; +}; + export type Livez200 = { data: FactoryResponseDTO; /** diff --git a/frontend/src/container/FormAlertRules/ChartPreview/index.tsx b/frontend/src/container/FormAlertRules/ChartPreview/index.tsx index 209daa598f..6b0581232f 100644 --- 
a/frontend/src/container/FormAlertRules/ChartPreview/index.tsx +++ b/frontend/src/container/FormAlertRules/ChartPreview/index.tsx @@ -139,8 +139,8 @@ function ChartPreview({ if (startTime && endTime && startTime !== endTime) { dispatch( UpdateTimeInterval('custom', [ - parseInt(getTimeString(startTime), 10), - parseInt(getTimeString(endTime), 10), + Number.parseInt(getTimeString(startTime), 10), + Number.parseInt(getTimeString(endTime), 10), ]), ); } diff --git a/frontend/src/container/FormAlertRules/index.tsx b/frontend/src/container/FormAlertRules/index.tsx index 11dc64f71e..82d3d714ef 100644 --- a/frontend/src/container/FormAlertRules/index.tsx +++ b/frontend/src/container/FormAlertRules/index.tsx @@ -370,7 +370,7 @@ function FormAlertRules({ // onQueryCategoryChange handles changes to query category // in state as well as sets additional defaults const onQueryCategoryChange = (val: EQueryType): void => { - const element = document.getElementById('top'); + const element = document.querySelector('#top'); if (element) { element.scrollIntoView({ behavior: 'smooth' }); } diff --git a/frontend/src/container/MetricsExplorer/Explorer/Explorer.tsx b/frontend/src/container/MetricsExplorer/Explorer/Explorer.tsx index fa9f789143..0778c2cfbb 100644 --- a/frontend/src/container/MetricsExplorer/Explorer/Explorer.tsx +++ b/frontend/src/container/MetricsExplorer/Explorer/Explorer.tsx @@ -279,7 +279,7 @@ function Explorer(): JSX.Element { [], ); - const [warning, setWarning] = useState(undefined); + const [warning, setWarning] = useState(); const oneChartPerQueryDisabledTooltip = useMemo(() => { if (splitedQueries.length <= 1) { @@ -291,7 +291,7 @@ function Explorer(): JSX.Element { if (disableOneChartPerQuery) { return 'One chart per query cannot be disabled for multiple queries with different units.'; } - return undefined; + return; }, [disableOneChartPerQuery, splitedQueries.length, units.length]); // Show the y axis unit selector if - diff --git 
a/frontend/src/container/MetricsExplorer/Inspect/Inspect.tsx b/frontend/src/container/MetricsExplorer/Inspect/Inspect.tsx index 62d78ff178..1593a06e24 100644 --- a/frontend/src/container/MetricsExplorer/Inspect/Inspect.tsx +++ b/frontend/src/container/MetricsExplorer/Inspect/Inspect.tsx @@ -217,7 +217,7 @@ function Inspect({ ); } - if (!inspectMetricsTimeSeries.length) { + if (inspectMetricsTimeSeries.length === 0) { return renderFallback( 'inspect-metrics-empty', , diff --git a/frontend/src/container/MetricsExplorer/Inspect/useInspectMetrics.ts b/frontend/src/container/MetricsExplorer/Inspect/useInspectMetrics.ts index 3c56ac2d17..3ea23bc13f 100644 --- a/frontend/src/container/MetricsExplorer/Inspect/useInspectMetrics.ts +++ b/frontend/src/container/MetricsExplorer/Inspect/useInspectMetrics.ts @@ -254,10 +254,10 @@ export function useInspectMetrics( const valuesMap = new Map(); series.values.forEach(({ timestamp, value }) => { - valuesMap.set(timestamp, parseFloat(value)); + valuesMap.set(timestamp, Number.parseFloat(value)); }); - return timestamps.map((timestamp) => valuesMap.get(timestamp) ?? NaN); + return timestamps.map((timestamp) => valuesMap.get(timestamp) ?? Number.NaN); }); const rawData = [timestamps, ...timeseriesArray]; @@ -271,7 +271,7 @@ export function useInspectMetrics( labels.add(label); }); }); - return Array.from(labels); + return [...labels]; }, [inspectMetricsData]); const reset = useCallback(() => { diff --git a/frontend/src/hooks/queryBuilder/useGetQueryRange.ts b/frontend/src/hooks/queryBuilder/useGetQueryRange.ts index 4a47ed88f8..b017477d6d 100644 --- a/frontend/src/hooks/queryBuilder/useGetQueryRange.ts +++ b/frontend/src/hooks/queryBuilder/useGetQueryRange.ts @@ -115,8 +115,10 @@ export const useGetQueryRange: UseGetQueryRange = ( const updatedQuery = updateBarStepInterval( requestData.query, - requestData.start ? requestData.start * 1e3 : parseInt(start, 10) * 1e3, - requestData.end ? 
requestData.end * 1e3 : parseInt(end, 10) * 1e3, + requestData.start + ? requestData.start * 1e3 + : Number.parseInt(start, 10) * 1e3, + requestData.end ? requestData.end * 1e3 : Number.parseInt(end, 10) * 1e3, ); return { diff --git a/frontend/src/pages/LogsExplorer/index.tsx b/frontend/src/pages/LogsExplorer/index.tsx index 9bf139acbe..ad14e34d62 100644 --- a/frontend/src/pages/LogsExplorer/index.tsx +++ b/frontend/src/pages/LogsExplorer/index.tsx @@ -98,7 +98,7 @@ function LogsExplorer(): JSX.Element { setIsLoadingQueries(false); }, [queryClient]); - const [warning, setWarning] = useState(undefined); + const [warning, setWarning] = useState(); const handleChangeSelectedView = useCallback( (view: ExplorerViews, querySearchParameters?: ICurrentQueryData): void => { diff --git a/frontend/src/pages/TracesExplorer/index.tsx b/frontend/src/pages/TracesExplorer/index.tsx index 071f71cdbd..b0c3f74e83 100644 --- a/frontend/src/pages/TracesExplorer/index.tsx +++ b/frontend/src/pages/TracesExplorer/index.tsx @@ -101,7 +101,7 @@ function TracesExplorer(): JSX.Element { getExplorerViewFromUrl(searchParams, panelTypesFromUrl), ); - const [warning, setWarning] = useState(undefined); + const [warning, setWarning] = useState(); const [isOpen, setOpen] = useState(true); const defaultQuery = useMemo( diff --git a/pkg/apiserver/signozapiserver/inframonitoring.go b/pkg/apiserver/signozapiserver/inframonitoring.go index 5ed854ca23..6a3c7db145 100644 --- a/pkg/apiserver/signozapiserver/inframonitoring.go +++ b/pkg/apiserver/signozapiserver/inframonitoring.go @@ -16,7 +16,7 @@ func (provider *provider) addInfraMonitoringRoutes(router *mux.Router) error { ID: "ListHosts", Tags: []string{"inframonitoring"}, Summary: "List Hosts for Infra Monitoring", - Description: "Returns a paginated list of hosts with key infrastructure metrics: CPU usage (%), memory usage (%), I/O wait (%), disk usage (%), and 15-minute load average. 
Each host includes its current status (active/inactive based on metrics reported in the last 10 minutes) and metadata attributes (e.g., os.type). Supports filtering via a filter expression, filtering by host status, custom groupBy to aggregate hosts by any attribute, ordering by any of the five metrics, and pagination via offset/limit. The response type is 'list' for the default host.name grouping or 'grouped_list' for custom groupBy keys. Also reports missing required metrics and whether the requested time range falls before the data retention boundary.", + Description: "Returns a paginated list of hosts with key infrastructure metrics: CPU usage (%), memory usage (%), I/O wait (%), disk usage (%), and 15-minute load average. Each host includes its current status (active/inactive based on metrics reported in the last 10 minutes) and metadata attributes (e.g., os.type). Supports filtering via a filter expression, filtering by host status, custom groupBy to aggregate hosts by any attribute, ordering by any of the five metrics, and pagination via offset/limit. The response type is 'list' for the default host.name grouping or 'grouped_list' for custom groupBy keys. Also reports missing required metrics and whether the requested time range falls before the data retention boundary. 
Numeric metric fields (cpu, memory, wait, load15, diskUsage) return -1 as a sentinel when no data is available for that field.", Request: new(inframonitoringtypes.PostableHosts), RequestContentType: "application/json", Response: new(inframonitoringtypes.Hosts), @@ -29,5 +29,24 @@ func (provider *provider) addInfraMonitoringRoutes(router *mux.Router) error { return err } + if err := router.Handle("/api/v2/infra_monitoring/pods", handler.New( + provider.authZ.ViewAccess(provider.infraMonitoringHandler.ListPods), + handler.OpenAPIDef{ + ID: "ListPods", + Tags: []string{"inframonitoring"}, + Summary: "List Pods for Infra Monitoring", + Description: "Returns a paginated list of Kubernetes pods with key metrics: CPU usage, CPU request/limit utilization, memory working set, memory request/limit utilization, current pod phase (pending/running/succeeded/failed/unknown), and pod age (ms since start time). Each pod includes metadata attributes (namespace, node, workload owner such as deployment/statefulset/daemonset/job/cronjob, cluster). Supports filtering via a filter expression, custom groupBy to aggregate pods by any attribute, ordering by any of the six metrics (cpu, cpu_request, cpu_limit, memory, memory_request, memory_limit), and pagination via offset/limit. The response type is 'list' for the default k8s.pod.uid grouping (each row is one pod with its current phase) or 'grouped_list' for custom groupBy keys (each row aggregates pods in the group with per-phase counts: pendingPodCount, runningPodCount, succeededPodCount, failedPodCount, unknownPodCount derived from each pod's latest phase in the window). Also reports missing required metrics and whether the requested time range falls before the data retention boundary. 
Numeric metric fields (podCPU, podCPURequest, podCPULimit, podMemory, podMemoryRequest, podMemoryLimit, podAge) return -1 as a sentinel when no data is available for that field.", + Request: new(inframonitoringtypes.PostablePods), + RequestContentType: "application/json", + Response: new(inframonitoringtypes.Pods), + ResponseContentType: "application/json", + SuccessStatusCode: http.StatusOK, + ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusUnauthorized}, + Deprecated: false, + SecuritySchemes: newSecuritySchemes(types.RoleViewer), + })).Methods(http.MethodPost).GetError(); err != nil { + return err + } + return nil } diff --git a/pkg/modules/inframonitoring/implinframonitoring/handler.go b/pkg/modules/inframonitoring/implinframonitoring/handler.go index bba53e179f..7e3d9d03fc 100644 --- a/pkg/modules/inframonitoring/implinframonitoring/handler.go +++ b/pkg/modules/inframonitoring/implinframonitoring/handler.go @@ -45,3 +45,27 @@ func (h *handler) ListHosts(rw http.ResponseWriter, req *http.Request) { render.Success(rw, http.StatusOK, result) } + +func (h *handler) ListPods(rw http.ResponseWriter, req *http.Request) { + claims, err := authtypes.ClaimsFromContext(req.Context()) + if err != nil { + render.Error(rw, err) + return + } + + orgID := valuer.MustNewUUID(claims.OrgID) + + var parsedReq inframonitoringtypes.PostablePods + if err := binding.JSON.BindBody(req.Body, &parsedReq); err != nil { + render.Error(rw, err) + return + } + + result, err := h.module.ListPods(req.Context(), orgID, &parsedReq) + if err != nil { + render.Error(rw, err) + return + } + + render.Success(rw, http.StatusOK, result) +} diff --git a/pkg/modules/inframonitoring/implinframonitoring/internaltypes.go b/pkg/modules/inframonitoring/implinframonitoring/internaltypes.go index c5f7f8593e..5fd3fe39df 100644 --- a/pkg/modules/inframonitoring/implinframonitoring/internaltypes.go +++ b/pkg/modules/inframonitoring/implinframonitoring/internaltypes.go @@ -14,3 +14,12 @@ type 
groupHostStatusCounts struct { Active int Inactive int } + +// podPhaseCounts holds per-group pod counts bucketed by latest phase in window. +type podPhaseCounts struct { + Pending int + Running int + Succeeded int + Failed int + Unknown int +} diff --git a/pkg/modules/inframonitoring/implinframonitoring/module.go b/pkg/modules/inframonitoring/implinframonitoring/module.go index 091a17125b..0776393dec 100644 --- a/pkg/modules/inframonitoring/implinframonitoring/module.go +++ b/pkg/modules/inframonitoring/implinframonitoring/module.go @@ -159,3 +159,86 @@ func (m *module) ListHosts(ctx context.Context, orgID valuer.UUID, req *inframon return resp, nil } + +func (m *module) ListPods(ctx context.Context, orgID valuer.UUID, req *inframonitoringtypes.PostablePods) (*inframonitoringtypes.Pods, error) { + if err := req.Validate(); err != nil { + return nil, err + } + + resp := &inframonitoringtypes.Pods{} + + if req.OrderBy == nil { + req.OrderBy = &qbtypes.OrderBy{ + Key: qbtypes.OrderByKey{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: inframonitoringtypes.PodsOrderByCPU, + }, + }, + Direction: qbtypes.OrderDirectionDesc, + } + } + + if len(req.GroupBy) == 0 { + req.GroupBy = []qbtypes.GroupByKey{podUIDGroupByKey} + resp.Type = inframonitoringtypes.ResponseTypeList + } else { + resp.Type = inframonitoringtypes.ResponseTypeGroupedList + } + + missingMetrics, minFirstReportedUnixMilli, err := m.getMetricsExistenceAndEarliestTime(ctx, podsTableMetricNamesList) + if err != nil { + return nil, err + } + if len(missingMetrics) > 0 { + resp.RequiredMetricsCheck = inframonitoringtypes.RequiredMetricsCheck{MissingMetrics: missingMetrics} + resp.Records = []inframonitoringtypes.PodRecord{} + resp.Total = 0 + return resp, nil + } + if req.End < int64(minFirstReportedUnixMilli) { + resp.EndTimeBeforeRetention = true + resp.Records = []inframonitoringtypes.PodRecord{} + resp.Total = 0 + return resp, nil + } + resp.RequiredMetricsCheck = 
inframonitoringtypes.RequiredMetricsCheck{MissingMetrics: []string{}} + + metadataMap, err := m.getPodsTableMetadata(ctx, req) + if err != nil { + return nil, err + } + + resp.Total = len(metadataMap) + + pageGroups, err := m.getTopPodGroups(ctx, orgID, req, metadataMap) + if err != nil { + return nil, err + } + + if len(pageGroups) == 0 { + resp.Records = []inframonitoringtypes.PodRecord{} + return resp, nil + } + + filterExpr := "" + if req.Filter != nil { + filterExpr = req.Filter.Expression + } + + fullQueryReq := buildFullQueryRequest(req.Start, req.End, filterExpr, req.GroupBy, pageGroups, m.newPodsTableListQuery()) + queryResp, err := m.querier.QueryRange(ctx, orgID, fullQueryReq) + if err != nil { + return nil, err + } + + phaseCounts, err := m.getPerGroupPodPhaseCounts(ctx, req, pageGroups) + if err != nil { + return nil, err + } + + isPodUIDInGroupBy := isKeyInGroupByAttrs(req.GroupBy, podUIDAttrKey) + resp.Records = buildPodRecords(isPodUIDInGroupBy, queryResp, pageGroups, req.GroupBy, metadataMap, phaseCounts, req.End) + resp.Warning = queryResp.Warning + + return resp, nil +} diff --git a/pkg/modules/inframonitoring/implinframonitoring/pods.go b/pkg/modules/inframonitoring/implinframonitoring/pods.go new file mode 100644 index 0000000000..24f62703de --- /dev/null +++ b/pkg/modules/inframonitoring/implinframonitoring/pods.go @@ -0,0 +1,334 @@ +package implinframonitoring + +import ( + "context" + "fmt" + "slices" + "strings" + "time" + + "github.com/SigNoz/signoz/pkg/querybuilder" + "github.com/SigNoz/signoz/pkg/telemetrymetrics" + "github.com/SigNoz/signoz/pkg/types/inframonitoringtypes" + "github.com/SigNoz/signoz/pkg/types/metrictypes" + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" + "github.com/SigNoz/signoz/pkg/valuer" + "github.com/huandu/go-sqlbuilder" +) + +// buildPodRecords assembles the page records. Phase counts come from +// phaseCounts in both modes. 
In list mode (isPodUIDInGroupBy=true) each +// group is one pod, so exactly one count is 1; PodPhase is derived from +// which one. In grouped_list mode PodPhase stays PodPhaseNone. +func buildPodRecords( + isPodUIDInGroupBy bool, + resp *qbtypes.QueryRangeResponse, + pageGroups []map[string]string, + groupBy []qbtypes.GroupByKey, + metadataMap map[string]map[string]string, + phaseCounts map[string]podPhaseCounts, + reqEnd int64, +) []inframonitoringtypes.PodRecord { + metricsMap := parseFullQueryResponse(resp, groupBy) + + records := make([]inframonitoringtypes.PodRecord, 0, len(pageGroups)) + for _, labels := range pageGroups { + compositeKey := compositeKeyFromLabels(labels, groupBy) + podUID := labels[podUIDAttrKey] + + record := inframonitoringtypes.PodRecord{ // initialize with default values + PodUID: podUID, + PodPhase: inframonitoringtypes.PodPhaseNone, + PodCPU: -1, + PodCPURequest: -1, + PodCPULimit: -1, + PodMemory: -1, + PodMemoryRequest: -1, + PodMemoryLimit: -1, + PodAge: -1, + Meta: map[string]any{}, + } + + if metrics, ok := metricsMap[compositeKey]; ok { + if v, exists := metrics["A"]; exists { + record.PodCPU = v + } + if v, exists := metrics["B"]; exists { + record.PodCPURequest = v + } + if v, exists := metrics["C"]; exists { + record.PodCPULimit = v + } + if v, exists := metrics["D"]; exists { + record.PodMemory = v + } + if v, exists := metrics["E"]; exists { + record.PodMemoryRequest = v + } + if v, exists := metrics["F"]; exists { + record.PodMemoryLimit = v + } + } + + if phaseCountsForGroup, ok := phaseCounts[compositeKey]; ok { + record.PendingPodCount = phaseCountsForGroup.Pending + record.RunningPodCount = phaseCountsForGroup.Running + record.SucceededPodCount = phaseCountsForGroup.Succeeded + record.FailedPodCount = phaseCountsForGroup.Failed + record.UnknownPodCount = phaseCountsForGroup.Unknown + + // In list mode each group is one pod; the count==1 bucket identifies the phase. 
+ if isPodUIDInGroupBy { + switch { + case phaseCountsForGroup.Pending == 1: + record.PodPhase = inframonitoringtypes.PodPhasePending + case phaseCountsForGroup.Running == 1: + record.PodPhase = inframonitoringtypes.PodPhaseRunning + case phaseCountsForGroup.Succeeded == 1: + record.PodPhase = inframonitoringtypes.PodPhaseSucceeded + case phaseCountsForGroup.Failed == 1: + record.PodPhase = inframonitoringtypes.PodPhaseFailed + case phaseCountsForGroup.Unknown == 1: + record.PodPhase = inframonitoringtypes.PodPhaseUnknown + } + } + } + + if attrs, ok := metadataMap[compositeKey]; ok && isPodUIDInGroupBy { + // the condition above ensures we deduce age only if pod uid is in group by because if + // it's not in group by then we might have multiple pod uids in the same group and hence then podAge wont make sense + if startTimeStr, exists := attrs[podStartTimeAttrKey]; exists && startTimeStr != "" { + if t, err := time.Parse(time.RFC3339, startTimeStr); err == nil { + startTimeMs := t.UnixMilli() + if startTimeMs > 0 { + record.PodAge = reqEnd - startTimeMs + } + } + } + for k, v := range attrs { + record.Meta[k] = v + } + } + + records = append(records, record) + } + return records +} + +func (m *module) getTopPodGroups( + ctx context.Context, + orgID valuer.UUID, + req *inframonitoringtypes.PostablePods, + metadataMap map[string]map[string]string, +) ([]map[string]string, error) { + orderByKey := req.OrderBy.Key.Name + queryNamesForOrderBy := orderByToPodsQueryNames[orderByKey] + rankingQueryName := queryNamesForOrderBy[len(queryNamesForOrderBy)-1] + + topReq := &qbtypes.QueryRangeRequest{ + Start: uint64(req.Start), + End: uint64(req.End), + RequestType: qbtypes.RequestTypeScalar, + CompositeQuery: qbtypes.CompositeQuery{ + Queries: make([]qbtypes.QueryEnvelope, 0, len(queryNamesForOrderBy)), + }, + } + + for _, envelope := range m.newPodsTableListQuery().CompositeQuery.Queries { + if !slices.Contains(queryNamesForOrderBy, envelope.GetQueryName()) { + continue + } + 
copied := envelope + if copied.Type == qbtypes.QueryTypeBuilder { + existingExpr := "" + if f := copied.GetFilter(); f != nil { + existingExpr = f.Expression + } + reqFilterExpr := "" + if req.Filter != nil { + reqFilterExpr = req.Filter.Expression + } + merged := mergeFilterExpressions(existingExpr, reqFilterExpr) + copied.SetFilter(&qbtypes.Filter{Expression: merged}) + copied.SetGroupBy(req.GroupBy) + } + topReq.CompositeQuery.Queries = append(topReq.CompositeQuery.Queries, copied) + } + + resp, err := m.querier.QueryRange(ctx, orgID, topReq) + if err != nil { + return nil, err + } + + allMetricGroups := parseAndSortGroups(resp, rankingQueryName, req.GroupBy, req.OrderBy.Direction) + return paginateWithBackfill(allMetricGroups, metadataMap, req.GroupBy, req.Offset, req.Limit), nil +} + +func (m *module) getPodsTableMetadata(ctx context.Context, req *inframonitoringtypes.PostablePods) (map[string]map[string]string, error) { + var nonGroupByAttrs []string + for _, key := range podAttrKeysForMetadata { + if !isKeyInGroupByAttrs(req.GroupBy, key) { + nonGroupByAttrs = append(nonGroupByAttrs, key) + } + } + return m.getMetadata(ctx, podsTableMetricNamesList, req.GroupBy, nonGroupByAttrs, req.Filter, req.Start, req.End) +} + +// getPerGroupPodPhaseCounts computes per-group pod counts bucketed by each +// pod's latest phase in the requested window. +// Pipeline: +// +// timeSeriesFPs: fp ↔ (pod_uid, groupBy cols) from the time_series table. +// User filter + page-groups filter applied here. +// latestPhasePerPod: INNER JOIN samples × timeSeriesFPs, collapsed to +// the latest phase per pod via argMax(value, unix_milli). +// countPodsPerPhase: per-group uniqExactIf into 5 phase buckets. +// +// Groups absent from the result map have implicit zero counts (caller default). 
+func (m *module) getPerGroupPodPhaseCounts( + ctx context.Context, + req *inframonitoringtypes.PostablePods, + pageGroups []map[string]string, +) (map[string]podPhaseCounts, error) { + if len(pageGroups) == 0 || len(req.GroupBy) == 0 { + return map[string]podPhaseCounts{}, nil + } + + // Merged filter expression (user filter + page-groups IN clauses). + reqFilterExpr := "" + if req.Filter != nil { + reqFilterExpr = req.Filter.Expression + } + pageGroupsFilterExpr := buildPageGroupsFilterExpr(pageGroups) + filterExpr := mergeFilterExpressions(reqFilterExpr, pageGroupsFilterExpr) + + // Resolve tables. Same convention as hosts (distributed names from helpers). + adjustedStart, adjustedEnd, _, localTimeSeriesTable := telemetrymetrics.WhichTSTableToUse( + uint64(req.Start), uint64(req.End), nil, + ) + samplesTable := telemetrymetrics.WhichSamplesTableToUse( + uint64(req.Start), uint64(req.End), + metrictypes.UnspecifiedType, metrictypes.TimeAggregationUnspecified, nil, + ) + valueCol := telemetrymetrics.ValueColumnForSamplesTable(samplesTable) + + // ----- timeSeriesFPs ----- + timeSeriesFPs := sqlbuilder.NewSelectBuilder() + timeSeriesFPsSelectCols := []string{ + "fingerprint", + fmt.Sprintf("JSONExtractString(labels, %s) AS pod_uid", timeSeriesFPs.Var(podUIDAttrKey)), + } + for _, key := range req.GroupBy { + timeSeriesFPsSelectCols = append(timeSeriesFPsSelectCols, + fmt.Sprintf("JSONExtractString(labels, %s) AS %s", timeSeriesFPs.Var(key.Name), quoteIdentifier(key.Name)), + ) + } + timeSeriesFPs.Select(timeSeriesFPsSelectCols...) 
+ timeSeriesFPs.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, localTimeSeriesTable)) + timeSeriesFPs.Where( + timeSeriesFPs.E("metric_name", podPhaseMetricName), + timeSeriesFPs.GE("unix_milli", adjustedStart), + timeSeriesFPs.L("unix_milli", adjustedEnd), + ) + if filterExpr != "" { + filterClause, err := m.buildFilterClause(ctx, &qbtypes.Filter{Expression: filterExpr}, req.Start, req.End) + if err != nil { + return nil, err + } + if filterClause != nil { + timeSeriesFPs.AddWhereClause(filterClause) + } + } + timeSeriesFPsGroupBy := []string{"fingerprint", "pod_uid"} + for _, key := range req.GroupBy { + timeSeriesFPsGroupBy = append(timeSeriesFPsGroupBy, quoteIdentifier(key.Name)) + } + timeSeriesFPs.GroupBy(timeSeriesFPsGroupBy...) + timeSeriesFPsSQL, timeSeriesFPsArgs := timeSeriesFPs.BuildWithFlavor(sqlbuilder.ClickHouse) + + latestPhasePerPod := sqlbuilder.NewSelectBuilder() + latestPhasePerPodSelectCols := []string{"tsfp.pod_uid AS pod_uid"} + latestPhasePerPodGroupBy := []string{"pod_uid"} + for _, key := range req.GroupBy { + col := quoteIdentifier(key.Name) + latestPhasePerPodSelectCols = append(latestPhasePerPodSelectCols, fmt.Sprintf("tsfp.%s AS %s", col, col)) + latestPhasePerPodGroupBy = append(latestPhasePerPodGroupBy, col) + } + latestPhasePerPodSelectCols = append(latestPhasePerPodSelectCols, + fmt.Sprintf("argMax(samples.%s, samples.unix_milli) AS phase_value", valueCol), + ) + latestPhasePerPod.Select(latestPhasePerPodSelectCols...) + latestPhasePerPod.From(fmt.Sprintf( + "%s.%s AS samples INNER JOIN time_series_fps AS tsfp ON samples.fingerprint = tsfp.fingerprint", + telemetrymetrics.DBName, samplesTable, + )) + latestPhasePerPod.Where( + latestPhasePerPod.E("samples.metric_name", podPhaseMetricName), + latestPhasePerPod.GE("samples.unix_milli", req.Start), + latestPhasePerPod.L("samples.unix_milli", req.End), + "tsfp.pod_uid != ''", + ) + latestPhasePerPod.GroupBy(latestPhasePerPodGroupBy...) 
+ latestPhasePerPodSQL, latestPhasePerPodArgs := latestPhasePerPod.BuildWithFlavor(sqlbuilder.ClickHouse) + + // ----- countPodsPerPhase (outer SELECT) ----- + countPodsPerPhaseSelectCols := make([]string, 0, len(req.GroupBy)+5) + countPodsPerPhaseGroupBy := make([]string, 0, len(req.GroupBy)) + for _, key := range req.GroupBy { + col := quoteIdentifier(key.Name) + countPodsPerPhaseSelectCols = append(countPodsPerPhaseSelectCols, col) + countPodsPerPhaseGroupBy = append(countPodsPerPhaseGroupBy, col) + } + countPodsPerPhaseSelectCols = append(countPodsPerPhaseSelectCols, + fmt.Sprintf("uniqExactIf(pod_uid, phase_value = %d) AS pending_count", inframonitoringtypes.PodPhaseNumPending), + fmt.Sprintf("uniqExactIf(pod_uid, phase_value = %d) AS running_count", inframonitoringtypes.PodPhaseNumRunning), + fmt.Sprintf("uniqExactIf(pod_uid, phase_value = %d) AS succeeded_count", inframonitoringtypes.PodPhaseNumSucceeded), + fmt.Sprintf("uniqExactIf(pod_uid, phase_value = %d) AS failed_count", inframonitoringtypes.PodPhaseNumFailed), + fmt.Sprintf("uniqExactIf(pod_uid, phase_value = %d) AS unknown_count", inframonitoringtypes.PodPhaseNumUnknown), + ) + countPodsPerPhaseSQL := fmt.Sprintf( + "SELECT %s FROM latest_phase_per_pod GROUP BY %s", + strings.Join(countPodsPerPhaseSelectCols, ", "), + strings.Join(countPodsPerPhaseGroupBy, ", "), + ) + + // Combine CTEs + outer. + cteFragments := []string{ + fmt.Sprintf("time_series_fps AS (%s)", timeSeriesFPsSQL), + fmt.Sprintf("latest_phase_per_pod AS (%s)", latestPhasePerPodSQL), + } + finalSQL := querybuilder.CombineCTEs(cteFragments) + countPodsPerPhaseSQL + finalArgs := querybuilder.PrependArgs([][]any{timeSeriesFPsArgs, latestPhasePerPodArgs}, nil) + + rows, err := m.telemetryStore.ClickhouseDB().Query(ctx, finalSQL, finalArgs...) 
+ if err != nil { + return nil, err + } + defer rows.Close() + + result := make(map[string]podPhaseCounts) + for rows.Next() { + groupVals := make([]string, len(req.GroupBy)) + scanPtrs := make([]any, 0, len(req.GroupBy)+5) + for i := range groupVals { + scanPtrs = append(scanPtrs, &groupVals[i]) + } + var pending, running, succeeded, failed, unknown uint64 + scanPtrs = append(scanPtrs, &pending, &running, &succeeded, &failed, &unknown) + + if err := rows.Scan(scanPtrs...); err != nil { + return nil, err + } + result[compositeKeyFromList(groupVals)] = podPhaseCounts{ + Pending: int(pending), + Running: int(running), + Succeeded: int(succeeded), + Failed: int(failed), + Unknown: int(unknown), + } + } + if err := rows.Err(); err != nil { + return nil, err + } + return result, nil +} diff --git a/pkg/modules/inframonitoring/implinframonitoring/pods_constants.go b/pkg/modules/inframonitoring/implinframonitoring/pods_constants.go new file mode 100644 index 0000000000..f5db10aa7f --- /dev/null +++ b/pkg/modules/inframonitoring/implinframonitoring/pods_constants.go @@ -0,0 +1,178 @@ +package implinframonitoring + +import ( + "github.com/SigNoz/signoz/pkg/types/inframonitoringtypes" + "github.com/SigNoz/signoz/pkg/types/metrictypes" + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" +) + +const ( + podUIDAttrKey = "k8s.pod.uid" + podStartTimeAttrKey = "k8s.pod.start_time" + podPhaseMetricName = "k8s.pod.phase" +) + +var podUIDGroupByKey = qbtypes.GroupByKey{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: podUIDAttrKey, + FieldContext: telemetrytypes.FieldContextResource, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, +} + +var podsTableMetricNamesList = []string{ + "k8s.pod.cpu.usage", + "k8s.pod.cpu_request_utilization", + "k8s.pod.cpu_limit_utilization", + "k8s.pod.memory.working_set", + "k8s.pod.memory_request_utilization", + 
"k8s.pod.memory_limit_utilization", + "k8s.pod.phase", +} + +var podAttrKeysForMetadata = []string{ + "k8s.pod.uid", + "k8s.pod.name", + "k8s.namespace.name", + "k8s.node.name", + "k8s.deployment.name", + "k8s.statefulset.name", + "k8s.daemonset.name", + "k8s.job.name", + "k8s.cronjob.name", + "k8s.cluster.name", + "k8s.pod.start_time", +} + +var orderByToPodsQueryNames = map[string][]string{ + inframonitoringtypes.PodsOrderByCPU: {"A"}, + inframonitoringtypes.PodsOrderByCPURequest: {"B"}, + inframonitoringtypes.PodsOrderByCPULimit: {"C"}, + inframonitoringtypes.PodsOrderByMemory: {"D"}, + inframonitoringtypes.PodsOrderByMemoryRequest: {"E"}, + inframonitoringtypes.PodsOrderByMemoryLimit: {"F"}, +} + +// newPodsTableListQuery builds the composite QB v5 request for the pods list. +// Pod phase is derived separately via getPerGroupPodPhaseCounts (works for both +// list and grouped_list modes), so no phase query is included here. +func (m *module) newPodsTableListQuery() *qbtypes.QueryRangeRequest { + queries := []qbtypes.QueryEnvelope{ + // Query A: CPU usage + { + Type: qbtypes.QueryTypeBuilder, + Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{ + Name: "A", + Signal: telemetrytypes.SignalMetrics, + Aggregations: []qbtypes.MetricAggregation{ + { + MetricName: "k8s.pod.cpu.usage", + TimeAggregation: metrictypes.TimeAggregationAvg, + SpaceAggregation: metrictypes.SpaceAggregationSum, + ReduceTo: qbtypes.ReduceToAvg, + }, + }, + GroupBy: []qbtypes.GroupByKey{podUIDGroupByKey}, + Disabled: false, + }, + }, + // Query B: CPU request utilization + { + Type: qbtypes.QueryTypeBuilder, + Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{ + Name: "B", + Signal: telemetrytypes.SignalMetrics, + Aggregations: []qbtypes.MetricAggregation{ + { + MetricName: "k8s.pod.cpu_request_utilization", + TimeAggregation: metrictypes.TimeAggregationAvg, + SpaceAggregation: metrictypes.SpaceAggregationAvg, + ReduceTo: qbtypes.ReduceToAvg, + }, + }, + GroupBy: 
[]qbtypes.GroupByKey{podUIDGroupByKey}, + Disabled: false, + }, + }, + // Query C: CPU limit utilization + { + Type: qbtypes.QueryTypeBuilder, + Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{ + Name: "C", + Signal: telemetrytypes.SignalMetrics, + Aggregations: []qbtypes.MetricAggregation{ + { + MetricName: "k8s.pod.cpu_limit_utilization", + TimeAggregation: metrictypes.TimeAggregationAvg, + SpaceAggregation: metrictypes.SpaceAggregationAvg, + ReduceTo: qbtypes.ReduceToAvg, + }, + }, + GroupBy: []qbtypes.GroupByKey{podUIDGroupByKey}, + Disabled: false, + }, + }, + // Query D: Memory working set + { + Type: qbtypes.QueryTypeBuilder, + Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{ + Name: "D", + Signal: telemetrytypes.SignalMetrics, + Aggregations: []qbtypes.MetricAggregation{ + { + MetricName: "k8s.pod.memory.working_set", + TimeAggregation: metrictypes.TimeAggregationAvg, + SpaceAggregation: metrictypes.SpaceAggregationSum, + ReduceTo: qbtypes.ReduceToAvg, + }, + }, + GroupBy: []qbtypes.GroupByKey{podUIDGroupByKey}, + Disabled: false, + }, + }, + // Query E: Memory request utilization + { + Type: qbtypes.QueryTypeBuilder, + Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{ + Name: "E", + Signal: telemetrytypes.SignalMetrics, + Aggregations: []qbtypes.MetricAggregation{ + { + MetricName: "k8s.pod.memory_request_utilization", + TimeAggregation: metrictypes.TimeAggregationAvg, + SpaceAggregation: metrictypes.SpaceAggregationAvg, + ReduceTo: qbtypes.ReduceToAvg, + }, + }, + GroupBy: []qbtypes.GroupByKey{podUIDGroupByKey}, + Disabled: false, + }, + }, + // Query F: Memory limit utilization + { + Type: qbtypes.QueryTypeBuilder, + Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{ + Name: "F", + Signal: telemetrytypes.SignalMetrics, + Aggregations: []qbtypes.MetricAggregation{ + { + MetricName: "k8s.pod.memory_limit_utilization", + TimeAggregation: metrictypes.TimeAggregationAvg, + SpaceAggregation: 
metrictypes.SpaceAggregationAvg, + ReduceTo: qbtypes.ReduceToAvg, + }, + }, + GroupBy: []qbtypes.GroupByKey{podUIDGroupByKey}, + Disabled: false, + }, + }, + } + + return &qbtypes.QueryRangeRequest{ + RequestType: qbtypes.RequestTypeScalar, + CompositeQuery: qbtypes.CompositeQuery{ + Queries: queries, + }, + } +} diff --git a/pkg/modules/inframonitoring/inframonitoring.go b/pkg/modules/inframonitoring/inframonitoring.go index 112c434ab5..cea731130b 100644 --- a/pkg/modules/inframonitoring/inframonitoring.go +++ b/pkg/modules/inframonitoring/inframonitoring.go @@ -10,8 +10,10 @@ import ( type Handler interface { ListHosts(http.ResponseWriter, *http.Request) + ListPods(http.ResponseWriter, *http.Request) } type Module interface { ListHosts(ctx context.Context, orgID valuer.UUID, req *inframonitoringtypes.PostableHosts) (*inframonitoringtypes.Hosts, error) + ListPods(ctx context.Context, orgID valuer.UUID, req *inframonitoringtypes.PostablePods) (*inframonitoringtypes.Pods, error) } diff --git a/pkg/telemetrymetrics/tables.go b/pkg/telemetrymetrics/tables.go index e390b63268..3700ff388c 100644 --- a/pkg/telemetrymetrics/tables.go +++ b/pkg/telemetrymetrics/tables.go @@ -124,6 +124,17 @@ func CountExpressionForSamplesTable(tableName string) string { return "sum(count)" } +// ValueColumnForSamplesTable returns the column name holding the sample value: +// "last" for the 5m/30m aggregated tables, "value" otherwise. +// note all the other columns in the aggregated samples tables are nothing but aggregations. +// and so "last" is the value column for these tables. +func ValueColumnForSamplesTable(tableName string) string { + if tableName == SamplesV4Agg5mTableName || tableName == SamplesV4Agg30mTableName { + return "last" + } + return "value" +} + // start and end are in milliseconds // we have three tables for samples // 1. 
distributed_samples_v4 diff --git a/pkg/types/inframonitoringtypes/hosts.go b/pkg/types/inframonitoringtypes/hosts.go index 4e89c5957c..8bb10dd7d9 100644 --- a/pkg/types/inframonitoringtypes/hosts.go +++ b/pkg/types/inframonitoringtypes/hosts.go @@ -49,7 +49,7 @@ type HostFilter struct { FilterByStatus HostStatus `json:"filterByStatus"` } -// Validate ensures HostsListRequest contains acceptable values. +// Validate ensures PostableHosts contains acceptable values. func (req *PostableHosts) Validate() error { if req == nil { return errors.NewInvalidInputf(errors.CodeInvalidInput, "request is nil") diff --git a/pkg/types/inframonitoringtypes/pods.go b/pkg/types/inframonitoringtypes/pods.go new file mode 100644 index 0000000000..8526850953 --- /dev/null +++ b/pkg/types/inframonitoringtypes/pods.go @@ -0,0 +1,109 @@ +package inframonitoringtypes + +import ( + "encoding/json" + "slices" + + "github.com/SigNoz/signoz/pkg/errors" + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" +) + +type Pods struct { + Type ResponseType `json:"type" required:"true"` + Records []PodRecord `json:"records" required:"true"` + Total int `json:"total" required:"true"` + RequiredMetricsCheck RequiredMetricsCheck `json:"requiredMetricsCheck" required:"true"` + EndTimeBeforeRetention bool `json:"endTimeBeforeRetention" required:"true"` + Warning *qbtypes.QueryWarnData `json:"warning,omitempty"` +} + +type PodRecord struct { + PodUID string `json:"podUID" required:"true"` + PodCPU float64 `json:"podCPU" required:"true"` + PodCPURequest float64 `json:"podCPURequest" required:"true"` + PodCPULimit float64 `json:"podCPULimit" required:"true"` + PodMemory float64 `json:"podMemory" required:"true"` + PodMemoryRequest float64 `json:"podMemoryRequest" required:"true"` + PodMemoryLimit float64 `json:"podMemoryLimit" required:"true"` + PodPhase PodPhase `json:"podPhase" required:"true"` + PendingPodCount int `json:"pendingPodCount" required:"true"` + RunningPodCount 
int `json:"runningPodCount" required:"true"` + SucceededPodCount int `json:"succeededPodCount" required:"true"` + FailedPodCount int `json:"failedPodCount" required:"true"` + UnknownPodCount int `json:"unknownPodCount" required:"true"` + PodAge int64 `json:"podAge" required:"true"` + Meta map[string]interface{} `json:"meta" required:"true"` +} + +// PostablePods is the request body for the v2 pods list API. +type PostablePods struct { + Start int64 `json:"start" required:"true"` + End int64 `json:"end" required:"true"` + Filter *qbtypes.Filter `json:"filter"` + GroupBy []qbtypes.GroupByKey `json:"groupBy"` + OrderBy *qbtypes.OrderBy `json:"orderBy"` + Offset int `json:"offset"` + Limit int `json:"limit" required:"true"` +} + +// Validate ensures PostablePods contains acceptable values. +func (req *PostablePods) Validate() error { + if req == nil { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "request is nil") + } + + if req.Start <= 0 { + return errors.NewInvalidInputf( + errors.CodeInvalidInput, + "invalid start time %d: start must be greater than 0", + req.Start, + ) + } + + if req.End <= 0 { + return errors.NewInvalidInputf( + errors.CodeInvalidInput, + "invalid end time %d: end must be greater than 0", + req.End, + ) + } + + if req.Start >= req.End { + return errors.NewInvalidInputf( + errors.CodeInvalidInput, + "invalid time range: start (%d) must be less than end (%d)", + req.Start, + req.End, + ) + } + + if req.Limit < 1 || req.Limit > 5000 { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "limit must be between 1 and 5000") + } + + if req.Offset < 0 { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "offset cannot be negative") + } + + if req.OrderBy != nil { + if !slices.Contains(PodsValidOrderByKeys, req.OrderBy.Key.Name) { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid order by key: %s", req.OrderBy.Key.Name) + } + if req.OrderBy.Direction != qbtypes.OrderDirectionAsc && req.OrderBy.Direction != 
qbtypes.OrderDirectionDesc { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid order by direction: %s", req.OrderBy.Direction) + } + } + + return nil +} + +// UnmarshalJSON validates input immediately after decoding. +func (req *PostablePods) UnmarshalJSON(data []byte) error { + type raw PostablePods + var decoded raw + if err := json.Unmarshal(data, &decoded); err != nil { + return err + } + *req = PostablePods(decoded) + return req.Validate() +} diff --git a/pkg/types/inframonitoringtypes/pods_constants.go b/pkg/types/inframonitoringtypes/pods_constants.go new file mode 100644 index 0000000000..d321c4b158 --- /dev/null +++ b/pkg/types/inframonitoringtypes/pods_constants.go @@ -0,0 +1,55 @@ +package inframonitoringtypes + +import "github.com/SigNoz/signoz/pkg/valuer" + +type PodPhase struct { + valuer.String +} + +var ( + PodPhasePending = PodPhase{valuer.NewString("pending")} + PodPhaseRunning = PodPhase{valuer.NewString("running")} + PodPhaseSucceeded = PodPhase{valuer.NewString("succeeded")} + PodPhaseFailed = PodPhase{valuer.NewString("failed")} + PodPhaseUnknown = PodPhase{valuer.NewString("unknown")} + PodPhaseNone = PodPhase{valuer.NewString("")} +) + +func (PodPhase) Enum() []any { + return []any{ + PodPhasePending, + PodPhaseRunning, + PodPhaseSucceeded, + PodPhaseFailed, + PodPhaseUnknown, + PodPhaseNone, + } +} + +// Numeric pod phase values emitted by the k8s.pod.phase metric +// (source: OTel kubeletstats receiver). 
+const ( + PodPhaseNumPending = 1 + PodPhaseNumRunning = 2 + PodPhaseNumSucceeded = 3 + PodPhaseNumFailed = 4 + PodPhaseNumUnknown = 5 +) + +const ( + PodsOrderByCPU = "cpu" + PodsOrderByCPURequest = "cpu_request" + PodsOrderByCPULimit = "cpu_limit" + PodsOrderByMemory = "memory" + PodsOrderByMemoryRequest = "memory_request" + PodsOrderByMemoryLimit = "memory_limit" +) + +var PodsValidOrderByKeys = []string{ + PodsOrderByCPU, + PodsOrderByCPURequest, + PodsOrderByCPULimit, + PodsOrderByMemory, + PodsOrderByMemoryRequest, + PodsOrderByMemoryLimit, +} diff --git a/pkg/types/inframonitoringtypes/pods_test.go b/pkg/types/inframonitoringtypes/pods_test.go new file mode 100644 index 0000000000..431b5d7b93 --- /dev/null +++ b/pkg/types/inframonitoringtypes/pods_test.go @@ -0,0 +1,219 @@ +package inframonitoringtypes + +import ( + "testing" + + "github.com/SigNoz/signoz/pkg/errors" + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "github.com/SigNoz/signoz/pkg/valuer" + "github.com/stretchr/testify/require" +) + +func TestPostablePods_Validate(t *testing.T) { + tests := []struct { + name string + req *PostablePods + wantErr bool + }{ + { + name: "valid request", + req: &PostablePods{ + Start: 1000, + End: 2000, + Limit: 100, + Offset: 0, + }, + wantErr: false, + }, + { + name: "nil request", + req: nil, + wantErr: true, + }, + { + name: "start time zero", + req: &PostablePods{ + Start: 0, + End: 2000, + Limit: 100, + Offset: 0, + }, + wantErr: true, + }, + { + name: "start time negative", + req: &PostablePods{ + Start: -1000, + End: 2000, + Limit: 100, + Offset: 0, + }, + wantErr: true, + }, + { + name: "end time zero", + req: &PostablePods{ + Start: 1000, + End: 0, + Limit: 100, + Offset: 0, + }, + wantErr: true, + }, + { + name: "start time greater than end time", + req: &PostablePods{ + Start: 2000, + End: 1000, + Limit: 100, + Offset: 0, + }, + wantErr: true, + }, + { + name: 
"start time equal to end time", + req: &PostablePods{ + Start: 1000, + End: 1000, + Limit: 100, + Offset: 0, + }, + wantErr: true, + }, + { + name: "limit zero", + req: &PostablePods{ + Start: 1000, + End: 2000, + Limit: 0, + Offset: 0, + }, + wantErr: true, + }, + { + name: "limit negative", + req: &PostablePods{ + Start: 1000, + End: 2000, + Limit: -10, + Offset: 0, + }, + wantErr: true, + }, + { + name: "limit exceeds max", + req: &PostablePods{ + Start: 1000, + End: 2000, + Limit: 5001, + Offset: 0, + }, + wantErr: true, + }, + { + name: "offset negative", + req: &PostablePods{ + Start: 1000, + End: 2000, + Limit: 100, + Offset: -5, + }, + wantErr: true, + }, + { + name: "orderBy nil is valid", + req: &PostablePods{ + Start: 1000, + End: 2000, + Limit: 100, + Offset: 0, + }, + wantErr: false, + }, + { + name: "orderBy with valid key cpu and direction asc", + req: &PostablePods{ + Start: 1000, + End: 2000, + Limit: 100, + Offset: 0, + OrderBy: &qbtypes.OrderBy{ + Key: qbtypes.OrderByKey{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: PodsOrderByCPU, + }, + }, + Direction: qbtypes.OrderDirectionAsc, + }, + }, + wantErr: false, + }, + { + name: "orderBy with phase key is rejected", + req: &PostablePods{ + Start: 1000, + End: 2000, + Limit: 100, + Offset: 0, + OrderBy: &qbtypes.OrderBy{ + Key: qbtypes.OrderByKey{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: "phase", + }, + }, + Direction: qbtypes.OrderDirectionDesc, + }, + }, + wantErr: true, + }, + { + name: "orderBy with invalid key", + req: &PostablePods{ + Start: 1000, + End: 2000, + Limit: 100, + Offset: 0, + OrderBy: &qbtypes.OrderBy{ + Key: qbtypes.OrderByKey{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: "unknown", + }, + }, + Direction: qbtypes.OrderDirectionDesc, + }, + }, + wantErr: true, + }, + { + name: "orderBy with valid key but invalid direction", + req: &PostablePods{ + Start: 1000, + End: 2000, + Limit: 100, + Offset: 0, + OrderBy: 
&qbtypes.OrderBy{ + Key: qbtypes.OrderByKey{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: PodsOrderByMemory, + }, + }, + Direction: qbtypes.OrderDirection{String: valuer.NewString("invalid")}, + }, + }, + wantErr: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := tt.req.Validate() + if tt.wantErr { + require.Error(t, err) + require.True(t, errors.Ast(err, errors.TypeInvalidInput), "expected error to be of type InvalidInput") + } else { + require.NoError(t, err) + } + }) + } +} From be9dd7de0ea01a06d20e667dac6d3acf42f64324 Mon Sep 17 00:00:00 2001 From: SagarRajput-7 <162284829+SagarRajput-7@users.noreply.github.com> Date: Tue, 28 Apr 2026 17:20:19 +0530 Subject: [PATCH 08/19] feat(billing-page): added cancel subscription option in billing page (#11118) * feat(billing-page): added cancel subscription option in billing page * feat(billing-page): semantic token correction * feat(billing-page): added test cases * feat(billing-page): used css module * feat(billing-page): added license condition and log event * feat(billing-page): added test cases * feat(billing-page): addressed design feedbacks --- .../src/AppRoutes/__tests__/Private.test.tsx | 6 +- .../BillingContainer.test.tsx | 91 ++++++++++++++- .../BillingContainer/BillingContainer.tsx | 10 +- .../CancelSubscriptionBanner.module.scss | 35 ++++++ .../CancelSubscriptionBanner.test.tsx | 68 +++++++++++ .../CancelSubscriptionBanner.tsx | 106 ++++++++++++++++++ frontend/src/tests/test-utils.tsx | 2 +- .../src/types/api/licensesV3/getActive.ts | 2 +- 8 files changed, 312 insertions(+), 8 deletions(-) create mode 100644 frontend/src/container/BillingContainer/CancelSubscriptionBanner.module.scss create mode 100644 frontend/src/container/BillingContainer/CancelSubscriptionBanner.test.tsx create mode 100644 frontend/src/container/BillingContainer/CancelSubscriptionBanner.tsx diff --git a/frontend/src/AppRoutes/__tests__/Private.test.tsx 
b/frontend/src/AppRoutes/__tests__/Private.test.tsx index 7bbff0cf9d..9e31871be5 100644 --- a/frontend/src/AppRoutes/__tests__/Private.test.tsx +++ b/frontend/src/AppRoutes/__tests__/Private.test.tsx @@ -105,7 +105,7 @@ function createMockLicense( status: '', updated_at: '0', }, - state: LicenseState.ACTIVE, + state: LicenseState.ACTIVATED, status: LicenseStatus.VALID, platform: LicensePlatform.CLOUD, created_at: '0', @@ -931,7 +931,7 @@ describe('PrivateRoute', () => { isFetchingActiveLicense: false, activeLicense: createMockLicense({ platform: LicensePlatform.CLOUD, - state: LicenseState.ACTIVE, + state: LicenseState.ACTIVATED, }), }, isCloudUser: true, @@ -1522,7 +1522,7 @@ describe('PrivateRoute', () => { isFetchingActiveLicense: false, activeLicense: createMockLicense({ platform: LicensePlatform.CLOUD, - state: LicenseState.ACTIVE, + state: LicenseState.ACTIVATED, }), trialInfo: createMockTrialInfo({ workSpaceBlock: false }), user: createMockUser({ role: USER_ROLES.ADMIN as ROLES }), diff --git a/frontend/src/container/BillingContainer/BillingContainer.test.tsx b/frontend/src/container/BillingContainer/BillingContainer.test.tsx index 0ebbd49398..d36a13123f 100644 --- a/frontend/src/container/BillingContainer/BillingContainer.test.tsx +++ b/frontend/src/container/BillingContainer/BillingContainer.test.tsx @@ -4,7 +4,13 @@ import { notOfTrailResponse, trialConvertedToSubscriptionResponse, } from 'mocks-server/__mockdata__/licenses'; -import { act, render, screen } from 'tests/test-utils'; +import { act, render, screen, getAppContextMock } from 'tests/test-utils'; +import APIError from 'types/api/error'; +import { + LicensePlatform, + LicenseResModel, + LicenseState, +} from 'types/api/licensesV3/getActive'; import { getFormattedDate } from 'utils/timeUtils'; import BillingContainer from './BillingContainer'; @@ -99,6 +105,10 @@ describe('BillingContainer', () => { await expect( screen.findByRole('link', { name: /here/i }), ).resolves.toBeInTheDocument(); + + 
await expect( + screen.findByText('Cancel Subscription', { selector: 'span' }), + ).resolves.toBeInTheDocument(); }); it('OnTrail but trialConvertedToSubscription', async () => { @@ -138,6 +148,85 @@ describe('BillingContainer', () => { const dayRemainingInBillingPeriod = await screen.findByText(/1 days_remaining/i); expect(dayRemainingInBillingPeriod).toBeInTheDocument(); + + await expect( + screen.findByText('Cancel Subscription', { selector: 'span' }), + ).resolves.toBeInTheDocument(); + }); + }); + + describe('CancelSubscriptionBanner visibility', () => { + const baseActiveLicense = getAppContextMock('ADMIN') + .activeLicense as LicenseResModel; + + it('should render when license is ACTIVATED and platform is CLOUD', async () => { + render(); + await expect( + screen.findByText('Cancel Subscription', { selector: 'span' }), + ).resolves.toBeInTheDocument(); + }); + + it.each([ + ['EXPIRED', LicenseState.EXPIRED], + ['TERMINATED', LicenseState.TERMINATED], + ['CANCELLED', LicenseState.CANCELLED], + ['EVALUATION_EXPIRED', LicenseState.EVALUATION_EXPIRED], + ['DEFAULTED', LicenseState.DEFAULTED], + ['ISSUED', LicenseState.ISSUED], + ['EVALUATING', LicenseState.EVALUATING], + ])('should not render when license state is %s', async (_, state) => { + render( + , + {}, + { + appContextOverrides: { + activeLicense: { ...baseActiveLicense, state }, + }, + }, + ); + await screen.findByText('billing'); + expect( + screen.queryByText('Cancel Subscription', { selector: 'span' }), + ).not.toBeInTheDocument(); + }); + + const makeAPIError = (statusCode: number): APIError => + new APIError({ + httpStatusCode: statusCode as any, + error: { code: 'error', message: 'error', url: '', errors: [] }, + }); + + it.each([ + [ + 'Self-Hosted platform', + { + activeLicense: { + ...baseActiveLicense, + platform: LicensePlatform.SELF_HOSTED, + }, + activeLicenseFetchError: null, + }, + ], + [ + 'Community Enterprise user (license API 404)', + { + activeLicense: null, + 
activeLicenseFetchError: makeAPIError(404), + }, + ], + [ + 'Community user (license API 501)', + { + activeLicense: null, + activeLicenseFetchError: makeAPIError(501), + }, + ], + ])('should not render for %s', async (_, overrides) => { + render(, {}, { appContextOverrides: overrides }); + await screen.findByText('billing'); + expect( + screen.queryByText('Cancel Subscription', { selector: 'span' }), + ).not.toBeInTheDocument(); }); }); diff --git a/frontend/src/container/BillingContainer/BillingContainer.tsx b/frontend/src/container/BillingContainer/BillingContainer.tsx index 1490edbcc3..598655027a 100644 --- a/frontend/src/container/BillingContainer/BillingContainer.tsx +++ b/frontend/src/container/BillingContainer/BillingContainer.tsx @@ -34,10 +34,12 @@ import { CheckoutSuccessPayloadProps } from 'types/api/billing/checkout'; import { getBaseUrl } from 'utils/basePath'; import { getFormattedDate, getRemainingDays } from 'utils/timeUtils'; +import CancelSubscriptionBanner from './CancelSubscriptionBanner'; import { BillingUsageGraph } from './BillingUsageGraph/BillingUsageGraph'; import { prepareCsvData } from './BillingUsageGraph/utils'; import './BillingContainer.styles.scss'; +import { LicenseState } from 'types/api/licensesV3/getActive'; interface DataType { key: string; @@ -317,7 +319,7 @@ export default function BillingContainer(): JSX.Element { const handleBilling = useCallback(async () => { if (!trialInfo?.trialConvertedToSubscription) { - logEvent('Billing : Upgrade Plan', { + void logEvent('Billing : Upgrade Plan', { user: pick(user, ['email', 'userId', 'name']), org, }); @@ -326,7 +328,7 @@ export default function BillingContainer(): JSX.Element { url: getBaseUrl(), }); } else { - logEvent('Billing : Manage Billing', { + void logEvent('Billing : Manage Billing', { user: pick(user, ['email', 'userId', 'name']), org, }); @@ -535,6 +537,10 @@ export default function BillingContainer(): JSX.Element { {(isLoading || isFetchingBillingData) && 
renderTableSkeleton()}
+ {isCloudUserVal && activeLicense?.state === LicenseState.ACTIVATED && ( + + )} + {!trialInfo?.trialConvertedToSubscription && (
({ + getBasePath: (): string => '/', + withBasePath: (path: string): string => path, + getAbsoluteUrl: (path: string): string => `https://test.signoz.io${path}`, + getBaseUrl: (): string => 'https://test.signoz.io', +})); + +describe('CancelSubscriptionBanner', () => { + it('renders banner with title and subtitle', () => { + render(); + expect( + screen.getByText('Cancel Subscription', { selector: 'span' }), + ).toBeInTheDocument(); + expect( + screen.getByText('Cancel your SigNoz subscription.'), + ).toBeInTheDocument(); + }); + + it('opens dialog with correct content when Cancel Subscription is clicked', async () => { + const user = userEvent.setup({ pointerEventsCheck: 0 }); + render(); + + await user.click( + screen.getByRole('button', { name: /cancel subscription/i }), + ); + + expect(screen.getByRole('dialog')).toBeInTheDocument(); + expect( + screen.getByText(/reach out to our support team/i), + ).toBeInTheDocument(); + expect( + screen.getByRole('button', { name: /keep subscription/i }), + ).toBeInTheDocument(); + expect( + screen.getByRole('button', { name: /contact support/i }), + ).toBeInTheDocument(); + }); + + it('sends mailto to cloud-support with correct subject on Contact Support', async () => { + const realCreateElement = document.createElement.bind(document); + const mockClick = jest.fn(); + const mockAnchor = { href: '', click: mockClick }; + jest.spyOn(document, 'createElement').mockImplementation((tag: string) => { + if (tag === 'a') { + return mockAnchor as unknown as HTMLAnchorElement; + } + return realCreateElement(tag); + }); + + const user = userEvent.setup({ pointerEventsCheck: 0 }); + render(); + + await user.click( + screen.getByRole('button', { name: /cancel subscription/i }), + ); + await user.click(screen.getByRole('button', { name: /contact support/i })); + + expect(mockAnchor.href).toContain('mailto:cloud-support@signoz.io'); + expect(mockAnchor.href).toContain('Cancel%20My%20SigNoz%20Subscription'); + 
expect(mockClick).toHaveBeenCalledTimes(1); + + jest.restoreAllMocks(); + }); +}); diff --git a/frontend/src/container/BillingContainer/CancelSubscriptionBanner.tsx b/frontend/src/container/BillingContainer/CancelSubscriptionBanner.tsx new file mode 100644 index 0000000000..771094d964 --- /dev/null +++ b/frontend/src/container/BillingContainer/CancelSubscriptionBanner.tsx @@ -0,0 +1,106 @@ +import { useState } from 'react'; +import { X } from '@signozhq/icons'; +import { Button, DialogWrapper } from '@signozhq/ui'; +import logEvent from 'api/common/logEvent'; +import { pick } from 'lodash-es'; +import { useAppContext } from 'providers/App/App'; +import { getBaseUrl } from 'utils/basePath'; + +import styles from './CancelSubscriptionBanner.module.scss'; + +function CancelSubscriptionBanner(): JSX.Element { + const [open, setOpen] = useState(false); + const { user, org } = useAppContext(); + + const handleOpenCancelDialog = (): void => { + void logEvent('Billing : Cancel Subscription Clicked', { + user: pick(user, ['email', 'displayName', 'role', 'organization']), + role: user?.role, + }); + setOpen(true); + }; + + const handleContactSupport = (): void => { + void logEvent('Billing : Cancel Subscription Confirmed', { + user: pick(user, ['email', 'displayName', 'role', 'organization']), + role: user?.role, + }); + const subject = encodeURIComponent('Cancel My SigNoz Subscription'); + const orgName = org?.[0]?.displayName ?? ''; + const body = encodeURIComponent( + [ + 'Hi SigNoz Team,', + '', + 'I would like to cancel my SigNoz Cloud subscription.', + 'Please find my account details below.', + '', + 'Account Details:', + ` • SigNoz URL: ${getBaseUrl()}`, + ...(orgName ? [` • Organization: ${orgName}`] : []), + ` • Account Email: ${user?.email ?? 
''}`, + '', + 'Reason for Cancellation:', + '[Please share the reason for cancellation]', + '', + 'Additional feedback (optional):', + '[Any other feedback]', + '', + 'Regards,', + '[user name or team name]', + ].join('\n'), + ); + const link = document.createElement('a'); + link.href = `mailto:cloud-support@signoz.io?subject=${subject}&body=${body}`; + link.click(); + setOpen(false); + }; + + const footer = ( + <> + + + + ); + + return ( + <> +
+
+ Cancel Subscription + Cancel your SigNoz subscription. +
+ +
+ +

+ To cancel your SigNoz subscription, please reach out to our support team. + We'll be happy to assist you. +

+
+ + ); +} + +export default CancelSubscriptionBanner; diff --git a/frontend/src/tests/test-utils.tsx b/frontend/src/tests/test-utils.tsx index b030b18e2a..e26096a2a9 100644 --- a/frontend/src/tests/test-utils.tsx +++ b/frontend/src/tests/test-utils.tsx @@ -119,7 +119,7 @@ export function getAppContextMock( status: '', updated_at: '0', }, - state: LicenseState.ACTIVE, + state: LicenseState.ACTIVATED, status: LicenseStatus.VALID, platform: LicensePlatform.CLOUD, created_at: '0', diff --git a/frontend/src/types/api/licensesV3/getActive.ts b/frontend/src/types/api/licensesV3/getActive.ts index 654a70a857..cc12feb15b 100644 --- a/frontend/src/types/api/licensesV3/getActive.ts +++ b/frontend/src/types/api/licensesV3/getActive.ts @@ -11,7 +11,7 @@ export enum LicenseStatus { export enum LicenseState { DEFAULTED = 'DEFAULTED', - ACTIVE = 'ACTIVE', + ACTIVATED = 'ACTIVATED', EXPIRED = 'EXPIRED', ISSUED = 'ISSUED', EVALUATING = 'EVALUATING', From 738ae6f21b05836c038a019ca243a931e48555e8 Mon Sep 17 00:00:00 2001 From: Pandey Date: Tue, 28 Apr 2026 18:02:35 +0530 Subject: [PATCH 09/19] docs: add servers block to the openapi spec (#11129) * docs: add servers block to the openapi spec * docs: add info block --- docs/api/openapi.yml | 18 ++++++++++++++++++ pkg/signoz/openapi.go | 27 +++++++++++++++++++++++++-- 2 files changed, 43 insertions(+), 2 deletions(-) diff --git a/docs/api/openapi.yml b/docs/api/openapi.yml index 9c83c968bd..8617c30370 100644 --- a/docs/api/openapi.yml +++ b/docs/api/openapi.yml @@ -5296,7 +5296,12 @@ components: scheme: bearer type: http info: + contact: + email: support@signoz.io + name: SigNoz Support + url: https://signoz.io description: OpenTelemetry-Native Logs, Metrics and Traces in a single pane + termsOfService: https://signoz.io/terms-of-service/ title: SigNoz version: "" openapi: 3.0.3 @@ -17512,3 +17517,16 @@ paths: summary: Replace variables tags: - querier +servers: +- description: The fully qualified URL to the SigNoz APIServer. 
+ url: https://{host}:{port}{base_path} + variables: + base_path: + default: / + description: The base path of the SigNoz APIServer + host: + default: localhost + description: The host of the SigNoz APIServer + port: + default: "8080" + description: The port of the SigNoz APIServer diff --git a/pkg/signoz/openapi.go b/pkg/signoz/openapi.go index 5e272670e4..b5d2ea9b69 100644 --- a/pkg/signoz/openapi.go +++ b/pkg/signoz/openapi.go @@ -98,8 +98,31 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta return defaultDefName })) - reflector.SpecSchema().SetTitle("SigNoz") - reflector.SpecSchema().SetDescription("OpenTelemetry-Native Logs, Metrics and Traces in a single pane") + reflector.Spec.WithInfo(*(&openapi3.Info{}). + WithTitle("SigNoz"). + WithDescription("OpenTelemetry-Native Logs, Metrics and Traces in a single pane"). + WithTermsOfService("https://signoz.io/terms-of-service/"). + WithContact(*(&openapi3.Contact{}). + WithName("SigNoz Support"). + WithURL("https://signoz.io"). + WithEmail("support@signoz.io")), + ) + + reflector.Spec.WithServers( + // Default server + *(&openapi3.Server{}).WithURL("https://{host}:{port}{base_path}"). + WithDescription("The fully qualified URL to the SigNoz APIServer."). + WithVariablesItem("host", *(&openapi3.ServerVariable{}). + WithDefault("localhost"). + WithDescription("The host of the SigNoz APIServer")). + WithVariablesItem("port", *(&openapi3.ServerVariable{}). + WithDefault("8080"). + WithDescription("The port of the SigNoz APIServer")). + WithVariablesItem("base_path", *(&openapi3.ServerVariable{}). + WithDefault("/"). 
+ WithDescription("The base path of the SigNoz APIServer")), + ) + reflector.SpecSchema().SetAPIKeySecurity(authtypes.IdentNProviderAPIKey.StringValue(), "SigNoz-Api-Key", openapi.InHeader, "API Keys") reflector.SpecSchema().SetHTTPBearerTokenSecurity(authtypes.IdentNProviderTokenizer.StringValue(), "Tokenizer", "Tokens generated by the tokenizer") From 584bf2fe743b68fbab6c2a071656cb579703e75c Mon Sep 17 00:00:00 2001 From: Piyush Singariya Date: Tue, 28 Apr 2026 20:59:46 +0530 Subject: [PATCH 10/19] chore: add json enabled as feature flag for FE (#11050) * chore: add json enabled as feature flag for FE * fix: still using global bool * feat: flagger integration in flow * fix: flagger threaded into tests * test: removed nil checks * fix: minor changes * chore: rename field * chore: remove querybuilder helper * fix: unit tests * fix: correct env var * fix: lint fix * fix: lint * chore: replace flag --- ee/query-service/app/api/featureFlags.go | 9 ++ ee/query-service/app/server.go | 1 + frontend/src/components/LogDetail/index.tsx | 2 +- frontend/src/constants/features.ts | 2 +- .../LogDetailedView/BodyTitleRenderer.tsx | 2 +- .../TableView/useAsyncJSONProcessing.ts | 2 +- pkg/flagger/flaggertest/flaggertest.go | 49 ++++++++ pkg/flagger/registry.go | 9 ++ pkg/querier/signozquerier/provider.go | 13 ++- pkg/query-service/app/http_handler.go | 9 ++ .../app/logparsingpipeline/controller.go | 15 ++- pkg/query-service/app/server.go | 1 + pkg/query-service/rules/setups_test.go | 20 +++- .../rules/threshold_rule_test.go | 4 +- pkg/querybuilder/agg_rewrite.go | 17 ++- pkg/querybuilder/constants.go | 16 --- pkg/querybuilder/fallback_expr.go | 3 +- pkg/querybuilder/where_clause_visitor.go | 12 +- pkg/querybuilder/where_clause_visitor_test.go | 31 ++--- pkg/signoz/handler_test.go | 2 +- pkg/signoz/module.go | 2 + pkg/signoz/module_test.go | 2 +- pkg/signoz/signoz.go | 3 +- pkg/telemetryaudit/statement_builder.go | 7 +- pkg/telemetryaudit/statement_builder_test.go | 10 +- 
pkg/telemetrylogs/condition_builder.go | 20 +++- pkg/telemetrylogs/condition_builder_test.go | 21 ++-- pkg/telemetrylogs/const.go | 7 +- pkg/telemetrylogs/field_mapper.go | 25 +++-- pkg/telemetrylogs/field_mapper_test.go | 13 ++- .../filter_expr_like_warning_test.go | 11 +- .../filter_expr_logs_body_json_test.go | 12 +- pkg/telemetrylogs/filter_expr_logs_test.go | 11 +- pkg/telemetrylogs/json_stmt_builder_test.go | 52 ++------- pkg/telemetrylogs/statement_builder.go | 30 ++++- pkg/telemetrylogs/stmt_builder_test.go | 106 +++++++++--------- pkg/telemetrymetadata/metadata.go | 27 +++-- pkg/telemetrymetadata/metadata_query_test.go | 2 + pkg/telemetrymetadata/metadata_test.go | 23 ++-- pkg/telemetrymeter/stmt_builder_test.go | 7 +- .../statement_builder.go | 11 ++ .../statement_builder_test.go | 4 + pkg/telemetrytraces/statement_builder.go | 7 +- pkg/telemetrytraces/stmt_builder_test.go | 25 ++++- .../trace_operator_cte_builder.go | 3 + .../trace_operator_cte_builder_test.go | 11 +- .../trace_operator_statement_builder.go | 3 + pkg/telemetrytraces/trace_time_range_test.go | 5 +- .../tests/querier_json_body/conftest.py | 2 +- 49 files changed, 435 insertions(+), 246 deletions(-) create mode 100644 pkg/flagger/flaggertest/flaggertest.go diff --git a/ee/query-service/app/api/featureFlags.go b/ee/query-service/app/api/featureFlags.go index 4c5ebe3bb1..5cdc54c28c 100644 --- a/ee/query-service/app/api/featureFlags.go +++ b/ee/query-service/app/api/featureFlags.go @@ -71,6 +71,15 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) { Route: "", }) + bodyJSONQuery := ah.Signoz.Flagger.BooleanOrEmpty(ctx, flagger.FeatureUseJSONBody, evalCtx) + featureSet = append(featureSet, &licensetypes.Feature{ + Name: valuer.NewString(flagger.FeatureUseJSONBody.String()), + Active: bodyJSONQuery, + Usage: 0, + UsageLimit: -1, + Route: "", + }) + if constants.IsDotMetricsEnabled { for idx, feature := range featureSet { if feature.Name == 
licensetypes.DotMetricsEnabled { diff --git a/ee/query-service/app/server.go b/ee/query-service/app/server.go index d3cdb365de..c96c34dbc1 100644 --- a/ee/query-service/app/server.go +++ b/ee/query-service/app/server.go @@ -105,6 +105,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) { signoz.SQLStore, integrationsController.GetPipelinesForInstalledIntegrations, reader, + signoz.Flagger, ) if err != nil { return nil, err diff --git a/frontend/src/components/LogDetail/index.tsx b/frontend/src/components/LogDetail/index.tsx index b8f5f15e28..c9f4b498c4 100644 --- a/frontend/src/components/LogDetail/index.tsx +++ b/frontend/src/components/LogDetail/index.tsx @@ -83,7 +83,7 @@ function LogDetailInner({ const [isFilterVisible, setIsFilterVisible] = useState(false); const { featureFlags } = useAppContext(); const isBodyJsonQueryEnabled = - featureFlags?.find((flag) => flag.name === FeatureKeys.BODY_JSON_ENABLED) + featureFlags?.find((flag) => flag.name === FeatureKeys.USE_JSON_BODY) ?.active || false; const [filters, setFilters] = useState(null); diff --git a/frontend/src/constants/features.ts b/frontend/src/constants/features.ts index cafd048027..928b1d7647 100644 --- a/frontend/src/constants/features.ts +++ b/frontend/src/constants/features.ts @@ -9,5 +9,5 @@ export enum FeatureKeys { ANOMALY_DETECTION = 'anomaly_detection', ONBOARDING_V3 = 'onboarding_v3', DOT_METRICS_ENABLED = 'dot_metrics_enabled', - BODY_JSON_ENABLED = 'body_json_enabled', + USE_JSON_BODY = 'use_json_body', } diff --git a/frontend/src/container/LogDetailedView/BodyTitleRenderer.tsx b/frontend/src/container/LogDetailedView/BodyTitleRenderer.tsx index 35df0baf04..a9f912526f 100644 --- a/frontend/src/container/LogDetailedView/BodyTitleRenderer.tsx +++ b/frontend/src/container/LogDetailedView/BodyTitleRenderer.tsx @@ -48,7 +48,7 @@ function BodyTitleRenderer({ const cleanedNodeKey = removeObjectFromString(nodeKey); const isBodyJsonQueryEnabled = - 
featureFlags?.find((flag) => flag.name === FeatureKeys.BODY_JSON_ENABLED) + featureFlags?.find((flag) => flag.name === FeatureKeys.USE_JSON_BODY) ?.active || false; // Group by is supported only for body json query enabled and not for array elements diff --git a/frontend/src/container/LogDetailedView/TableView/useAsyncJSONProcessing.ts b/frontend/src/container/LogDetailedView/TableView/useAsyncJSONProcessing.ts index 8213fc14f5..933fc8a9b8 100644 --- a/frontend/src/container/LogDetailedView/TableView/useAsyncJSONProcessing.ts +++ b/frontend/src/container/LogDetailedView/TableView/useAsyncJSONProcessing.ts @@ -31,7 +31,7 @@ const useAsyncJSONProcessing = ( const processingRef = useRef(false); const { featureFlags } = useAppContext(); const isBodyJsonQueryEnabled = - featureFlags?.find((flag) => flag.name === FeatureKeys.BODY_JSON_ENABLED) + featureFlags?.find((flag) => flag.name === FeatureKeys.USE_JSON_BODY) ?.active || false; // eslint-disable-next-line sonarjs/cognitive-complexity diff --git a/pkg/flagger/flaggertest/flaggertest.go b/pkg/flagger/flaggertest/flaggertest.go new file mode 100644 index 0000000000..9dc0488ed0 --- /dev/null +++ b/pkg/flagger/flaggertest/flaggertest.go @@ -0,0 +1,49 @@ +// Package flaggertest provides helpers for creating Flagger instances in tests. +package flaggertest + +import ( + "context" + "testing" + + "github.com/SigNoz/signoz/pkg/flagger" + "github.com/SigNoz/signoz/pkg/flagger/configflagger" + "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" + "github.com/stretchr/testify/require" +) + +// New returns a Flagger with all flags at their registry defaults (all disabled). +// Use this in tests that do not need any feature flag enabled. 
+func New(t *testing.T) flagger.Flagger { + t.Helper() + registry := flagger.MustNewRegistry() + fl, err := flagger.New( + context.Background(), + instrumentationtest.New().ToProviderSettings(), + flagger.Config{}, + registry, + configflagger.NewFactory(registry), + ) + require.NoError(t, err) + return fl +} + +// WithUseJSONBody returns a Flagger with use_json_body set to the given value. +func WithUseJSONBody(t *testing.T, enabled bool) flagger.Flagger { + t.Helper() + registry := flagger.MustNewRegistry() + cfg := flagger.Config{} + if enabled { + cfg.Config.Boolean = map[string]bool{ + flagger.FeatureUseJSONBody.String(): true, + } + } + fl, err := flagger.New( + context.Background(), + instrumentationtest.New().ToProviderSettings(), + cfg, + registry, + configflagger.NewFactory(registry), + ) + require.NoError(t, err) + return fl +} diff --git a/pkg/flagger/registry.go b/pkg/flagger/registry.go index 282e75ddbb..a382b94328 100644 --- a/pkg/flagger/registry.go +++ b/pkg/flagger/registry.go @@ -8,6 +8,7 @@ var ( FeatureHideRootUser = featuretypes.MustNewName("hide_root_user") FeatureGetMetersFromZeus = featuretypes.MustNewName("get_meters_from_zeus") FeaturePutMetersInZeus = featuretypes.MustNewName("put_meters_in_zeus") + FeatureUseJSONBody = featuretypes.MustNewName("use_json_body") ) func MustNewRegistry() featuretypes.Registry { @@ -52,6 +53,14 @@ func MustNewRegistry() featuretypes.Registry { DefaultVariant: featuretypes.MustNewName("disabled"), Variants: featuretypes.NewBooleanVariants(), }, + &featuretypes.Feature{ + Name: FeatureUseJSONBody, + Kind: featuretypes.KindBoolean, + Stage: featuretypes.StageExperimental, + Description: "Controls whether body JSON querying is enabled", + DefaultVariant: featuretypes.MustNewName("disabled"), + Variants: featuretypes.NewBooleanVariants(), + }, ) if err != nil { panic(err) diff --git a/pkg/querier/signozquerier/provider.go b/pkg/querier/signozquerier/provider.go index f0e3ba60ee..c9cf8910e1 100644 --- 
a/pkg/querier/signozquerier/provider.go +++ b/pkg/querier/signozquerier/provider.go @@ -72,13 +72,14 @@ func newProvider( telemetrymetadata.DBName, telemetrymetadata.AttributesMetadataLocalTableName, telemetrymetadata.ColumnEvolutionMetadataTableName, + flagger, ) // Create trace statement builder traceFieldMapper := telemetrytraces.NewFieldMapper() traceConditionBuilder := telemetrytraces.NewConditionBuilder(traceFieldMapper) - traceAggExprRewriter := querybuilder.NewAggExprRewriter(settings, nil, traceFieldMapper, traceConditionBuilder, nil) + traceAggExprRewriter := querybuilder.NewAggExprRewriter(settings, nil, traceFieldMapper, traceConditionBuilder, nil, flagger) traceStmtBuilder := telemetrytraces.NewTraceQueryStatementBuilder( settings, telemetryMetadataStore, @@ -86,6 +87,7 @@ func newProvider( traceConditionBuilder, traceAggExprRewriter, telemetryStore, + flagger, ) // Create trace operator statement builder @@ -96,17 +98,19 @@ func newProvider( traceConditionBuilder, traceStmtBuilder, traceAggExprRewriter, + flagger, ) // Create log statement builder - logFieldMapper := telemetrylogs.NewFieldMapper() - logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper) + logFieldMapper := telemetrylogs.NewFieldMapper(flagger) + logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper, flagger) logAggExprRewriter := querybuilder.NewAggExprRewriter( settings, telemetrylogs.DefaultFullTextColumn, logFieldMapper, logConditionBuilder, telemetrylogs.GetBodyJSONKey, + flagger, ) logStmtBuilder := telemetrylogs.NewLogQueryStatementBuilder( settings, @@ -116,6 +120,7 @@ func newProvider( logAggExprRewriter, telemetrylogs.DefaultFullTextColumn, telemetrylogs.GetBodyJSONKey, + flagger, ) // Create audit statement builder @@ -127,6 +132,7 @@ func newProvider( auditFieldMapper, auditConditionBuilder, nil, + flagger, ) auditStmtBuilder := telemetryaudit.NewAuditQueryStatementBuilder( settings, @@ -136,6 +142,7 @@ func newProvider( 
auditAggExprRewriter, telemetryaudit.DefaultFullTextColumn, nil, + flagger, ) // Create metric statement builder diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go index c6442bfe18..e12ecf09e0 100644 --- a/pkg/query-service/app/http_handler.go +++ b/pkg/query-service/app/http_handler.go @@ -1774,6 +1774,15 @@ func (aH *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) { Route: "", }) + bodyJSONQuery := aH.Signoz.Flagger.BooleanOrEmpty(r.Context(), flagger.FeatureUseJSONBody, evalCtx) + featureSet = append(featureSet, &licensetypes.Feature{ + Name: valuer.NewString(flagger.FeatureUseJSONBody.String()), + Active: bodyJSONQuery, + Usage: 0, + UsageLimit: -1, + Route: "", + }) + if constants.IsDotMetricsEnabled { for idx, feature := range featureSet { if feature.Name == licensetypes.DotMetricsEnabled { diff --git a/pkg/query-service/app/logparsingpipeline/controller.go b/pkg/query-service/app/logparsingpipeline/controller.go index 8aa655f00d..953fdc8140 100644 --- a/pkg/query-service/app/logparsingpipeline/controller.go +++ b/pkg/query-service/app/logparsingpipeline/controller.go @@ -11,15 +11,16 @@ import ( "github.com/google/uuid" "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/flagger" "github.com/SigNoz/signoz/pkg/query-service/agentConf" "github.com/SigNoz/signoz/pkg/query-service/constants" "github.com/SigNoz/signoz/pkg/query-service/interfaces" "github.com/SigNoz/signoz/pkg/query-service/model" v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" "github.com/SigNoz/signoz/pkg/query-service/utils" - "github.com/SigNoz/signoz/pkg/querybuilder" "github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/types" + "github.com/SigNoz/signoz/pkg/types/featuretypes" "github.com/SigNoz/signoz/pkg/types/opamptypes" "github.com/SigNoz/signoz/pkg/types/pipelinetypes" "github.com/SigNoz/signoz/pkg/valuer" @@ -38,18 +39,21 @@ type LogParsingPipelineController struct { 
GetIntegrationPipelines func(context.Context, string) ([]pipelinetypes.GettablePipeline, error) // TODO(Piyush): remove with qbv5 migration reader interfaces.Reader + fl flagger.Flagger } func NewLogParsingPipelinesController( sqlStore sqlstore.SQLStore, getIntegrationPipelines func(context.Context, string) ([]pipelinetypes.GettablePipeline, error), reader interfaces.Reader, + fl flagger.Flagger, ) (*LogParsingPipelineController, error) { repo := NewRepo(sqlStore) return &LogParsingPipelineController{ Repo: repo, GetIntegrationPipelines: getIntegrationPipelines, reader: reader, + fl: fl, }, nil } @@ -363,14 +367,14 @@ func (pc *LogParsingPipelineController) AgentFeatureType() agentConf.AgentFeatur // Implements agentConf.AgentFeature interface. // RecommendAgentConfig generates the collector config to be sent to agents. -// The normalize pipeline (when BodyJSONQueryEnabled) is injected here, after +// The normalize pipeline (when use_json_body feature flag is on) is injected here, after // rawPipelineData is serialized. So it is only present in the config sent to // the collector and never persisted to the database as part of the user's pipeline list. // // NOTE: The configId sent to agents is derived from the pipeline version number // (e.g. "LogPipelines:5"), not the YAML content. If server-side logic changes -// the generated YAML without bumping the version (e.g. toggling BodyJSONQueryEnabled -// or updating operator IfExpressions), agents that already applied that version will +// the generated YAML without bumping the version (e.g. toggling the use_json_body +// flag or updating operator IfExpressions), agents that already applied that version will // not re-apply the new config. In such cases, users must save a new pipeline version // via the API to force agents to pick up the change. 
func (pc *LogParsingPipelineController) RecommendAgentConfig( @@ -398,7 +402,8 @@ func (pc *LogParsingPipelineController) RecommendAgentConfig( return nil, "", err } - if querybuilder.BodyJSONQueryEnabled { + // TODO(Tushar): thread orgID here to evaluate correctly + if pc.fl.BooleanOrEmpty(ctx, flagger.FeatureUseJSONBody, featuretypes.NewFlaggerEvaluationContext(orgId)) { // add default normalize pipeline at the beginning, only for sending to collector enrichedPipelines = append([]pipelinetypes.GettablePipeline{pc.getNormalizePipeline()}, enrichedPipelines...) } diff --git a/pkg/query-service/app/server.go b/pkg/query-service/app/server.go index bf469c8f22..c202494a4e 100644 --- a/pkg/query-service/app/server.go +++ b/pkg/query-service/app/server.go @@ -93,6 +93,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) { signoz.SQLStore, integrationsController.GetPipelinesForInstalledIntegrations, reader, + signoz.Flagger, ) if err != nil { return nil, err diff --git a/pkg/query-service/rules/setups_test.go b/pkg/query-service/rules/setups_test.go index 34736f7301..e03c2ad8dc 100644 --- a/pkg/query-service/rules/setups_test.go +++ b/pkg/query-service/rules/setups_test.go @@ -15,6 +15,8 @@ import ( "github.com/SigNoz/signoz/pkg/types/telemetrytypes" "github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest" "github.com/stretchr/testify/require" + + "github.com/SigNoz/signoz/pkg/flagger/flaggertest" ) func prepareQuerierForMetrics(t *testing.T, telemetryStore telemetrystore.TelemetryStore) (querier.Querier, *telemetrytypestest.MockMetadataStore) { @@ -54,8 +56,8 @@ func prepareQuerierForMetrics(t *testing.T, telemetryStore telemetrystore.Teleme ), metadataStore } -func prepareQuerierForLogs(telemetryStore telemetrystore.TelemetryStore, keysMap map[string][]*telemetrytypes.TelemetryFieldKey) querier.Querier { - +func prepareQuerierForLogs(t *testing.T, telemetryStore telemetrystore.TelemetryStore, keysMap 
map[string][]*telemetrytypes.TelemetryFieldKey) querier.Querier { + t.Helper() providerSettings := instrumentationtest.New().ToProviderSettings() metadataStore := telemetrytypestest.NewMockMetadataStore() @@ -66,14 +68,16 @@ func prepareQuerierForLogs(telemetryStore telemetrystore.TelemetryStore, keysMap } metadataStore.KeysMap = keysMap - logFieldMapper := telemetrylogs.NewFieldMapper() - logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper) + fl := flaggertest.New(t) + logFieldMapper := telemetrylogs.NewFieldMapper(fl) + logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper, fl) logAggExprRewriter := querybuilder.NewAggExprRewriter( providerSettings, telemetrylogs.DefaultFullTextColumn, logFieldMapper, logConditionBuilder, telemetrylogs.GetBodyJSONKey, + fl, ) logStmtBuilder := telemetrylogs.NewLogQueryStatementBuilder( providerSettings, @@ -83,6 +87,7 @@ func prepareQuerierForLogs(telemetryStore telemetrystore.TelemetryStore, keysMap logAggExprRewriter, telemetrylogs.DefaultFullTextColumn, telemetrylogs.GetBodyJSONKey, + fl, ) return querier.New( @@ -100,7 +105,8 @@ func prepareQuerierForLogs(telemetryStore telemetrystore.TelemetryStore, keysMap ) } -func prepareQuerierForTraces(telemetryStore telemetrystore.TelemetryStore, keysMap map[string][]*telemetrytypes.TelemetryFieldKey) querier.Querier { +func prepareQuerierForTraces(t *testing.T, telemetryStore telemetrystore.TelemetryStore, keysMap map[string][]*telemetrytypes.TelemetryFieldKey) querier.Querier { + t.Helper() providerSettings := instrumentationtest.New().ToProviderSettings() metadataStore := telemetrytypestest.NewMockMetadataStore() @@ -116,7 +122,8 @@ func prepareQuerierForTraces(telemetryStore telemetrystore.TelemetryStore, keysM traceFieldMapper := telemetrytraces.NewFieldMapper() traceConditionBuilder := telemetrytraces.NewConditionBuilder(traceFieldMapper) - traceAggExprRewriter := querybuilder.NewAggExprRewriter(providerSettings, nil, traceFieldMapper, 
traceConditionBuilder, nil) + fl := flaggertest.New(t) + traceAggExprRewriter := querybuilder.NewAggExprRewriter(providerSettings, nil, traceFieldMapper, traceConditionBuilder, nil, fl) traceStmtBuilder := telemetrytraces.NewTraceQueryStatementBuilder( providerSettings, metadataStore, @@ -124,6 +131,7 @@ func prepareQuerierForTraces(telemetryStore telemetrystore.TelemetryStore, keysM traceConditionBuilder, traceAggExprRewriter, telemetryStore, + fl, ) return querier.New( diff --git a/pkg/query-service/rules/threshold_rule_test.go b/pkg/query-service/rules/threshold_rule_test.go index d91120566a..50b8cf7cf1 100644 --- a/pkg/query-service/rules/threshold_rule_test.go +++ b/pkg/query-service/rules/threshold_rule_test.go @@ -829,7 +829,7 @@ func TestThresholdRuleTracesLink(t *testing.T) { WithArgs(nil, nil, nil, nil, nil, nil, nil). WillReturnRows(rows) - querier := prepareQuerierForTraces(telemetryStore, keysMap) + querier := prepareQuerierForTraces(t, telemetryStore, keysMap) postableRule.RuleCondition.CompareOperator = c.compareOperator postableRule.RuleCondition.MatchType = c.matchType @@ -946,7 +946,7 @@ func TestThresholdRuleLogsLink(t *testing.T) { WithArgs(nil, nil, nil, nil, nil, nil, nil, nil, nil, nil). 
WillReturnRows(rows) - querier := prepareQuerierForLogs(telemetryStore, keysMap) + querier := prepareQuerierForLogs(t, telemetryStore, keysMap) postableRule.RuleCondition.CompareOperator = c.compareOperator postableRule.RuleCondition.MatchType = c.matchType diff --git a/pkg/querybuilder/agg_rewrite.go b/pkg/querybuilder/agg_rewrite.go index cb8d714954..0b6b032c25 100644 --- a/pkg/querybuilder/agg_rewrite.go +++ b/pkg/querybuilder/agg_rewrite.go @@ -9,6 +9,8 @@ import ( chparser "github.com/AfterShip/clickhouse-sql-parser/parser" "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/flagger" + "github.com/SigNoz/signoz/pkg/types/featuretypes" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" "github.com/SigNoz/signoz/pkg/valuer" @@ -21,6 +23,7 @@ type aggExprRewriter struct { fieldMapper qbtypes.FieldMapper conditionBuilder qbtypes.ConditionBuilder jsonKeyToKey qbtypes.JsonKeyToFieldFunc + flagger flagger.Flagger } var _ qbtypes.AggExprRewriter = (*aggExprRewriter)(nil) @@ -31,6 +34,7 @@ func NewAggExprRewriter( fieldMapper qbtypes.FieldMapper, conditionBuilder qbtypes.ConditionBuilder, jsonKeyToKey qbtypes.JsonKeyToFieldFunc, + fl flagger.Flagger, ) *aggExprRewriter { set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/agg_rewrite") @@ -40,6 +44,7 @@ func NewAggExprRewriter( fieldMapper: fieldMapper, conditionBuilder: conditionBuilder, jsonKeyToKey: jsonKeyToKey, + flagger: fl, } } @@ -86,6 +91,7 @@ func (r *aggExprRewriter) Rewrite( r.fieldMapper, r.conditionBuilder, r.jsonKeyToKey, + r.flagger, ) // Rewrite the first select item (our expression) if err := sel.SelectItems[0].Accept(visitor); err != nil { @@ -138,6 +144,7 @@ type exprVisitor struct { fieldMapper qbtypes.FieldMapper conditionBuilder qbtypes.ConditionBuilder jsonKeyToKey qbtypes.JsonKeyToFieldFunc + flagger 
flagger.Flagger Modified bool chArgs []any isRate bool @@ -153,6 +160,7 @@ func newExprVisitor( fieldMapper qbtypes.FieldMapper, conditionBuilder qbtypes.ConditionBuilder, jsonKeyToKey qbtypes.JsonKeyToFieldFunc, + fl flagger.Flagger, ) *exprVisitor { return &exprVisitor{ ctx: ctx, @@ -164,6 +172,7 @@ func newExprVisitor( fieldMapper: fieldMapper, conditionBuilder: conditionBuilder, jsonKeyToKey: jsonKeyToKey, + flagger: fl, } } @@ -197,6 +206,9 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error { dataType = telemetrytypes.FieldDataTypeFloat64 } + // + bodyJSONEnabled := v.flagger.BooleanOrEmpty(v.ctx, flagger.FeatureUseJSONBody, featuretypes.NewFlaggerEvaluationContext(valuer.UUID{})) + // Handle *If functions with predicate + values if aggFunc.FuncCombinator { // Map the predicate (last argument) @@ -209,6 +221,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error { FieldKeys: v.fieldKeys, FieldMapper: v.fieldMapper, ConditionBuilder: v.conditionBuilder, + BodyJSONEnabled: bodyJSONEnabled, FullTextColumn: v.fullTextColumn, JsonKeyToKey: v.jsonKeyToKey, StartNs: v.startNs, @@ -237,7 +250,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error { for i := 0; i < len(args)-1; i++ { origVal := args[i].String() fieldKey := telemetrytypes.GetFieldKeyFromKeyText(origVal) - expr, exprArgs, err := CollisionHandledFinalExpr(v.ctx, v.startNs, v.endNs, &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey) + expr, exprArgs, err := CollisionHandledFinalExpr(v.ctx, v.startNs, v.endNs, &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey, bodyJSONEnabled) if err != nil { return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", origVal) } @@ -255,7 +268,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error { for i, arg := range args { orig := arg.String() fieldKey := 
telemetrytypes.GetFieldKeyFromKeyText(orig) - expr, exprArgs, err := CollisionHandledFinalExpr(v.ctx, v.startNs, v.endNs, &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey) + expr, exprArgs, err := CollisionHandledFinalExpr(v.ctx, v.startNs, v.endNs, &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey, bodyJSONEnabled) if err != nil { return err } diff --git a/pkg/querybuilder/constants.go b/pkg/querybuilder/constants.go index d3bd1afab5..7b82b4530a 100644 --- a/pkg/querybuilder/constants.go +++ b/pkg/querybuilder/constants.go @@ -1,9 +1,5 @@ package querybuilder -import ( - "os" -) - const ( TrueConditionLiteral = "true" SkipConditionLiteral = "__skip__" @@ -13,15 +9,3 @@ const ( var ( SkippableConditionLiterals = []string{SkipConditionLiteral, ErrorConditionLiteral} ) - -var ( - BodyJSONQueryEnabled = GetOrDefaultEnv("BODY_JSON_QUERY_ENABLED", "false") == "true" -) - -func GetOrDefaultEnv(key string, fallback string) string { - v := os.Getenv(key) - if len(v) == 0 { - return fallback - } - return v -} diff --git a/pkg/querybuilder/fallback_expr.go b/pkg/querybuilder/fallback_expr.go index aaab790f6f..4eca231f21 100644 --- a/pkg/querybuilder/fallback_expr.go +++ b/pkg/querybuilder/fallback_expr.go @@ -27,6 +27,7 @@ func CollisionHandledFinalExpr( keys map[string][]*telemetrytypes.TelemetryFieldKey, requiredDataType telemetrytypes.FieldDataType, jsonKeyToKey qbtypes.JsonKeyToFieldFunc, + bodyJSONEnabled bool, ) (string, []any, error) { if requiredDataType != telemetrytypes.FieldDataTypeString && @@ -106,7 +107,7 @@ func CollisionHandledFinalExpr( } // first if condition covers the older tests and second if condition covers the array conditions - if !BodyJSONQueryEnabled && field.FieldContext == telemetrytypes.FieldContextBody && jsonKeyToKey != nil { + if !bodyJSONEnabled && field.FieldContext == telemetrytypes.FieldContextBody && jsonKeyToKey != nil { return "", nil, 
errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the body column") } else if strings.Contains(field.Name, telemetrytypes.ArraySep) || strings.Contains(field.Name, telemetrytypes.ArrayAnyIndex) { return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the Array Paths: %s", field.Name) diff --git a/pkg/querybuilder/where_clause_visitor.go b/pkg/querybuilder/where_clause_visitor.go index bb4b5baf20..07feebce0b 100644 --- a/pkg/querybuilder/where_clause_visitor.go +++ b/pkg/querybuilder/where_clause_visitor.go @@ -36,6 +36,7 @@ type filterExpressionVisitor struct { builder *sqlbuilder.SelectBuilder fullTextColumn *telemetrytypes.TelemetryFieldKey jsonKeyToKey qbtypes.JsonKeyToFieldFunc + bodyJSONEnabled bool skipResourceFilter bool skipFullTextFilter bool skipFunctionCalls bool @@ -56,6 +57,7 @@ type FilterExprVisitorOpts struct { Builder *sqlbuilder.SelectBuilder FullTextColumn *telemetrytypes.TelemetryFieldKey JsonKeyToKey qbtypes.JsonKeyToFieldFunc + BodyJSONEnabled bool SkipResourceFilter bool SkipFullTextFilter bool SkipFunctionCalls bool @@ -76,6 +78,7 @@ func newFilterExpressionVisitor(opts FilterExprVisitorOpts) *filterExpressionVis builder: opts.Builder, fullTextColumn: opts.FullTextColumn, jsonKeyToKey: opts.JsonKeyToKey, + bodyJSONEnabled: opts.BodyJSONEnabled, skipResourceFilter: opts.SkipResourceFilter, skipFullTextFilter: opts.SkipFullTextFilter, skipFunctionCalls: opts.SkipFunctionCalls, @@ -751,8 +754,8 @@ func (v *filterExpressionVisitor) VisitFunctionCall(ctx *grammar.FunctionCallCon return ErrorConditionLiteral } - // filter arrays from keys - if BodyJSONQueryEnabled && functionName != "hasToken" { + // TODO(Tushar): thread orgID here to evaluate correctly + if v.bodyJSONEnabled && functionName != "hasToken" { filteredKeys := []*telemetrytypes.TelemetryFieldKey{} for _, key := range keys { if key.FieldDataType.IsArray() { @@ -793,7 +796,7 @@ func (v 
*filterExpressionVisitor) VisitFunctionCall(ctx *grammar.FunctionCallCon // this is that all other functions only support array fields if key.FieldContext == telemetrytypes.FieldContextBody { var err error - if BodyJSONQueryEnabled { + if v.bodyJSONEnabled { fieldName, err = v.fieldMapper.FieldFor(v.context, v.startNs, v.endNs, key) if err != nil { v.errors = append(v.errors, fmt.Sprintf("failed to get field name for key %s: %s", key.Name, err.Error())) @@ -936,7 +939,8 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any { // Note: Skip this logic if body json query is enabled so we can look up the key inside fields // // TODO(Piyush): After entire migration this is supposed to be removed. - if !BodyJSONQueryEnabled && fieldKey.FieldContext == telemetrytypes.FieldContextBody { + // TODO(Tushar): thread orgID here to evaluate correctly + if fieldKey.FieldContext == telemetrytypes.FieldContextBody && !v.bodyJSONEnabled { fieldKeysForName = append(fieldKeysForName, &fieldKey) } diff --git a/pkg/querybuilder/where_clause_visitor_test.go b/pkg/querybuilder/where_clause_visitor_test.go index c1d549b8da..c9de7613af 100644 --- a/pkg/querybuilder/where_clause_visitor_test.go +++ b/pkg/querybuilder/where_clause_visitor_test.go @@ -79,8 +79,10 @@ func TestPrepareWhereClause_EmptyVariableList(t *testing.T) { } // createTestVisitor creates a filterExpressionVisitor for testing VisitKey. 
-func createTestVisitor(fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey, ignoreNotFoundKeys bool) *filterExpressionVisitor { +func createTestVisitor(t *testing.T, fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey, ignoreNotFoundKeys bool) *filterExpressionVisitor { + t.Helper() return &filterExpressionVisitor{ + context: t.Context(), logger: slog.Default(), fieldKeys: fieldKeys, ignoreNotFoundKeys: ignoreNotFoundKeys, @@ -572,7 +574,7 @@ func TestVisitKey(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - visitor := createTestVisitor(tt.fieldKeys, tt.ignoreNotFoundKeys) + visitor := createTestVisitor(t, tt.fieldKeys, tt.ignoreNotFoundKeys) keyCtx := parseKeyContext(tt.keyText) if keyCtx == nil { @@ -776,7 +778,8 @@ type visitComparisonCase struct { // visitComparisonOpts builds the two FilterExprVisitorOpts shared by all // TestVisitComparison_* tests. -func visitComparisonOpts() (rsbOpts, sbOpts FilterExprVisitorOpts) { +func visitComparisonOpts(t *testing.T) (rsbOpts, sbOpts FilterExprVisitorOpts) { + t.Helper() allVariable := map[string]qbtypes.VariableItem{ "service": { Type: qbtypes.DynamicVariableType, @@ -790,6 +793,7 @@ func visitComparisonOpts() (rsbOpts, sbOpts FilterExprVisitorOpts) { FieldDataType: telemetrytypes.FieldDataTypeString, } rsbOpts = FilterExprVisitorOpts{ + Context: t.Context(), FieldKeys: visitTestKeys, ConditionBuilder: &resourceConditionBuilder{}, Variables: allVariable, @@ -799,6 +803,7 @@ func visitComparisonOpts() (rsbOpts, sbOpts FilterExprVisitorOpts) { IgnoreNotFoundKeys: true, } sbOpts = FilterExprVisitorOpts{ + Context: t.Context(), FieldKeys: visitTestKeys, ConditionBuilder: &conditionBuilder{}, Variables: allVariable, @@ -814,7 +819,7 @@ func visitComparisonOpts() (rsbOpts, sbOpts FilterExprVisitorOpts) { // TestVisitComparison_AND covers AND expressions with attribute keys (a, b, c → // TrueConditionLiteral in RSB) and resource keys (x, y, z → "{name}_cond" in RSB). 
func TestVisitComparison_AND(t *testing.T) { - rsbOpts, sbOpts := visitComparisonOpts() + rsbOpts, sbOpts := visitComparisonOpts(t) tests := []visitComparisonCase{ { name: "single attribute key", @@ -892,7 +897,7 @@ func TestVisitComparison_AND(t *testing.T) { // - NOT inside a comparison (e.g. NOT LIKE, NOT EXISTS): the inner NOT is folded // into the operator token; conditionBuilder ignores it, so no extra NOT is emitted. func TestVisitComparison_NOT(t *testing.T) { - rsbOpts, sbOpts := visitComparisonOpts() + rsbOpts, sbOpts := visitComparisonOpts(t) tests := []visitComparisonCase{ { // Unary NOT on an attribute key: NOT(SkipConditionLiteral) → SkipConditionLiteral (guard). @@ -985,7 +990,7 @@ func TestVisitComparison_NOT(t *testing.T) { // SkipResourceFilter to false when an OR token is detected in the expression, // so resource keys become visible in sbOpts for all cases in this suite. func TestVisitComparison_OR(t *testing.T) { - rsbOpts, sbOpts := visitComparisonOpts() + rsbOpts, sbOpts := visitComparisonOpts(t) tests := []visitComparisonCase{ { name: "resource OR resource", @@ -1086,7 +1091,7 @@ func TestVisitComparison_OR(t *testing.T) { // TestVisitComparison_Precedence covers AND/OR/NOT operator precedence // (AND binds tighter than OR; NOT binds tightest). func TestVisitComparison_Precedence(t *testing.T) { - rsbOpts, sbOpts := visitComparisonOpts() + rsbOpts, sbOpts := visitComparisonOpts(t) tests := []visitComparisonCase{ { // a→true short-circuits OR. @@ -1168,7 +1173,7 @@ func TestVisitComparison_Precedence(t *testing.T) { // VisitPrimary adds one extra layer of parens around real conditions; // TrueConditionLiteral passes through unwrapped. func TestVisitComparison_Parens(t *testing.T) { - rsbOpts, sbOpts := visitComparisonOpts() + rsbOpts, sbOpts := visitComparisonOpts(t) tests := []visitComparisonCase{ { // RSB: SkipConditionLiteral passes through unwrapped. SB: VisitPrimary wraps in parens. 
@@ -1271,7 +1276,7 @@ func TestVisitComparison_Parens(t *testing.T) { // rsbOpts has SkipFullTextFilter=true → TrueConditionLiteral. // sbOpts has SkipFullTextFilter=false, FullTextColumn=bodyCol → "body_cond". func TestVisitComparison_FullText(t *testing.T) { - rsbOpts, sbOpts := visitComparisonOpts() + rsbOpts, sbOpts := visitComparisonOpts(t) tests := []visitComparisonCase{ { name: "standalone full-text term", @@ -1428,7 +1433,7 @@ func TestVisitComparison_FullText(t *testing.T) { // Equality with __all__ does NOT short-circuit — the variable resolves to the literal // "__all__" string and ConditionFor is called normally. func TestVisitComparison_AllVariable(t *testing.T) { - rsbOpts, sbOpts := visitComparisonOpts() + rsbOpts, sbOpts := visitComparisonOpts(t) tests := []visitComparisonCase{ { name: "IN allVariable alone", @@ -1536,7 +1541,7 @@ func TestVisitComparison_AllVariable(t *testing.T) { // sbOpts has SkipFunctionCalls=false; has/hasAny/hasAll only support FieldContextBody, // so calls on attribute/resource keys return an error. func TestVisitComparison_FunctionCalls(t *testing.T) { - rsbOpts, sbOpts := visitComparisonOpts() + rsbOpts, sbOpts := visitComparisonOpts(t) tests := []visitComparisonCase{ { name: "has on attribute key", @@ -1615,7 +1620,7 @@ func TestVisitComparison_FunctionCalls(t *testing.T) { // (no keys resolved); SkipConditionLiteral short-circuits OR and is stripped from AND. // sbOpts has IgnoreNotFoundKeys=false → key lookup appends an error. func TestVisitComparison_UnknownKeys(t *testing.T) { - rsbOpts, sbOpts := visitComparisonOpts() + rsbOpts, sbOpts := visitComparisonOpts(t) tests := []visitComparisonCase{ { // RSB: unknown_key → SkipConditionLiteral (no keys resolved); stripped from AND; x_cond survives. 
@@ -1682,7 +1687,7 @@ func TestVisitComparison_UnknownKeys(t *testing.T) { // TestVisitComparison_SkippableLiteralValues guards against two distinct collision risks // involving SkippableConditionLiterals ("true", "__skip__", "__skip_because_of_error__"):. func TestVisitComparison_SkippableLiteralValues(t *testing.T) { - rsbOpts, sbOpts := visitComparisonOpts() + rsbOpts, sbOpts := visitComparisonOpts(t) tests := []visitComparisonCase{ { diff --git a/pkg/signoz/handler_test.go b/pkg/signoz/handler_test.go index 1dac2cda6f..75bdda9a6d 100644 --- a/pkg/signoz/handler_test.go +++ b/pkg/signoz/handler_test.go @@ -52,7 +52,7 @@ func TestNewHandlers(t *testing.T) { userRoleStore := impluser.NewUserRoleStore(sqlstore, providerSettings) userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings), userRoleStore, flagger) - modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, userGetter, userRoleStore, nil, nil) + modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, userGetter, userRoleStore, nil, nil, flagger) querierHandler := querier.NewHandler(providerSettings, nil, nil) registryHandler := factory.NewHandler(nil) diff --git a/pkg/signoz/module.go b/pkg/signoz/module.go index 85aabd8926..8e7d7ec869 100644 --- a/pkg/signoz/module.go +++ b/pkg/signoz/module.go @@ -8,6 +8,7 @@ import ( "github.com/SigNoz/signoz/pkg/cache" "github.com/SigNoz/signoz/pkg/emailing" "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/flagger" "github.com/SigNoz/signoz/pkg/modules/apdex" "github.com/SigNoz/signoz/pkg/modules/apdex/implapdex" "github.com/SigNoz/signoz/pkg/modules/authdomain" @@ -102,6 +103,7 @@ func NewModules( userRoleStore authtypes.UserRoleStore, serviceAccount serviceaccount.Module, cloudIntegrationModule 
cloudintegration.Module, + fl flagger.Flagger, ) Modules { quickfilter := implquickfilter.NewModule(implquickfilter.NewStore(sqlstore)) orgSetter := implorganization.NewSetter(implorganization.NewStore(sqlstore), alertmanager, quickfilter) diff --git a/pkg/signoz/module_test.go b/pkg/signoz/module_test.go index 4121fbf0f1..74476ba6b5 100644 --- a/pkg/signoz/module_test.go +++ b/pkg/signoz/module_test.go @@ -56,7 +56,7 @@ func TestNewModules(t *testing.T) { serviceAccount := implserviceaccount.NewModule(implserviceaccount.NewStore(sqlstore), nil, nil, nil, providerSettings, serviceaccount.Config{}) - modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, userGetter, userRoleStore, serviceAccount, implcloudintegration.NewModule()) + modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, userGetter, userRoleStore, serviceAccount, implcloudintegration.NewModule(), flagger) reflectVal := reflect.ValueOf(modules) for i := 0; i < reflectVal.NumField(); i++ { diff --git a/pkg/signoz/signoz.go b/pkg/signoz/signoz.go index fe4b82f83c..220491ddd8 100644 --- a/pkg/signoz/signoz.go +++ b/pkg/signoz/signoz.go @@ -419,6 +419,7 @@ func New( telemetrymetadata.DBName, telemetrymetadata.AttributesMetadataLocalTableName, telemetrymetadata.ColumnEvolutionMetadataTableName, + flagger, ) global, err := factory.NewProviderFromNamedMap( @@ -440,7 +441,7 @@ func New( } // Initialize all modules - modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard, userGetter, userRoleStore, serviceAccount, cloudIntegrationModule) + modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, 
alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard, userGetter, userRoleStore, serviceAccount, cloudIntegrationModule, flagger) // Initialize ruler from the variant-specific provider factories rulerInstance, err := factory.NewProviderFromNamedMap(ctx, providerSettings, config.Ruler, rulerProviderFactories(cache, alertmanager, sqlstore, telemetrystore, telemetryMetadataStore, prometheus, orgGetter, modules.RuleStateHistory, querier, queryParser), "signoz") diff --git a/pkg/telemetryaudit/statement_builder.go b/pkg/telemetryaudit/statement_builder.go index 9a3f88af69..e955ff4c7c 100644 --- a/pkg/telemetryaudit/statement_builder.go +++ b/pkg/telemetryaudit/statement_builder.go @@ -8,6 +8,7 @@ import ( "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/flagger" "github.com/SigNoz/signoz/pkg/querybuilder" "github.com/SigNoz/signoz/pkg/telemetryresourcefilter" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" @@ -36,6 +37,7 @@ func NewAuditQueryStatementBuilder( aggExprRewriter qbtypes.AggExprRewriter, fullTextColumn *telemetrytypes.TelemetryFieldKey, jsonKeyToKey qbtypes.JsonKeyToFieldFunc, + flagger flagger.Flagger, ) *auditQueryStatementBuilder { auditSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetryaudit") @@ -48,6 +50,7 @@ func NewAuditQueryStatementBuilder( metadataStore, fullTextColumn, jsonKeyToKey, + flagger, ) return &auditQueryStatementBuilder{ @@ -319,7 +322,7 @@ func (b *auditQueryStatementBuilder) buildTimeSeriesQuery( fieldNames := make([]string, 0, len(query.GroupBy)) for _, gb := range query.GroupBy { - expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey) + expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, 
&gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey, false) if err != nil { return nil, err } @@ -456,7 +459,7 @@ func (b *auditQueryStatementBuilder) buildScalarQuery( var allGroupByArgs []any for _, gb := range query.GroupBy { - expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey) + expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey, false) if err != nil { return nil, err } diff --git a/pkg/telemetryaudit/statement_builder_test.go b/pkg/telemetryaudit/statement_builder_test.go index f637b17fa5..01b9757b63 100644 --- a/pkg/telemetryaudit/statement_builder_test.go +++ b/pkg/telemetryaudit/statement_builder_test.go @@ -10,6 +10,7 @@ import ( qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" "github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest" + "github.com/SigNoz/signoz/pkg/flagger/flaggertest" "github.com/stretchr/testify/require" ) @@ -46,13 +47,15 @@ func auditFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey { } } -func newTestAuditStatementBuilder() *auditQueryStatementBuilder { +func newTestAuditStatementBuilder(t *testing.T) *auditQueryStatementBuilder { + t.Helper() + fl := flaggertest.New(t) mockMetadataStore := telemetrytypestest.NewMockMetadataStore() mockMetadataStore.KeysMap = auditFieldKeyMap() fm := NewFieldMapper() cb := NewConditionBuilder(fm) - aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil) + aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil, fl) return NewAuditQueryStatementBuilder( instrumentationtest.New().ToProviderSettings(), @@ 
-62,11 +65,12 @@ func newTestAuditStatementBuilder() *auditQueryStatementBuilder { aggExprRewriter, DefaultFullTextColumn, nil, + fl, ) } func TestStatementBuilder(t *testing.T) { - statementBuilder := newTestAuditStatementBuilder() + statementBuilder := newTestAuditStatementBuilder(t) ctx := context.Background() testCases := []struct { diff --git a/pkg/telemetrylogs/condition_builder.go b/pkg/telemetrylogs/condition_builder.go index 62a57a7a73..f40a81fd94 100644 --- a/pkg/telemetrylogs/condition_builder.go +++ b/pkg/telemetrylogs/condition_builder.go @@ -6,19 +6,23 @@ import ( schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator" "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/flagger" "github.com/SigNoz/signoz/pkg/querybuilder" + "github.com/SigNoz/signoz/pkg/types/featuretypes" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "github.com/SigNoz/signoz/pkg/valuer" "github.com/huandu/go-sqlbuilder" ) type conditionBuilder struct { fm qbtypes.FieldMapper + fl flagger.Flagger } -func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder { - return &conditionBuilder{fm: fm} +func NewConditionBuilder(fm qbtypes.FieldMapper, fl flagger.Flagger) *conditionBuilder { + return &conditionBuilder{fm: fm, fl: fl} } func (c *conditionBuilder) conditionFor( @@ -36,7 +40,8 @@ func (c *conditionBuilder) conditionFor( // TODO(Piyush): Update this to support multiple JSON columns based on evolutions for _, column := range columns { - if column.Type.GetType() == schema.ColumnTypeEnumJSON && querybuilder.BodyJSONQueryEnabled && key.Name != messageSubField { + // TODO(Tushar): thread orgID here to evaluate correctly + if column.Type.GetType() == schema.ColumnTypeEnumJSON && c.fl.BooleanOrEmpty(ctx, flagger.FeatureUseJSONBody, featuretypes.NewFlaggerEvaluationContext(valuer.UUID{})) && key.Name != messageSubField { valueType, value := 
InferDataType(value, operator, key) cond, err := NewJSONConditionBuilder(key, valueType).buildJSONCondition(operator, value, sb) if err != nil { @@ -56,7 +61,8 @@ func (c *conditionBuilder) conditionFor( } // Check if this is a body JSON search - either by FieldContext - if key.FieldContext == telemetrytypes.FieldContextBody && !querybuilder.BodyJSONQueryEnabled { + // TODO(Tushar): thread orgID here to evaluate correctly + if key.FieldContext == telemetrytypes.FieldContextBody && !c.fl.BooleanOrEmpty(ctx, flagger.FeatureUseJSONBody, featuretypes.NewFlaggerEvaluationContext(valuer.UUID{})) { fieldExpression, value = GetBodyJSONKey(ctx, key, operator, value) } @@ -167,7 +173,8 @@ func (c *conditionBuilder) conditionFor( // in the UI based query builder, `exists` and `not exists` are used for // key membership checks, so depending on the column type, the condition changes case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists: - if key.FieldContext == telemetrytypes.FieldContextBody && !querybuilder.BodyJSONQueryEnabled { + // TODO(Tushar): thread orgID here to evaluate correctly + if key.FieldContext == telemetrytypes.FieldContextBody && !c.fl.BooleanOrEmpty(ctx, flagger.FeatureUseJSONBody, featuretypes.NewFlaggerEvaluationContext(valuer.UUID{})) { if operator == qbtypes.FilterOperatorExists { return GetBodyJSONKeyForExists(ctx, key, operator, value), nil } else { @@ -287,7 +294,8 @@ func (c *conditionBuilder) ConditionFor( case telemetrytypes.FieldContextBody: // Querying JSON fields already account for Nullability of fields // so additional exists checks are not needed - if querybuilder.BodyJSONQueryEnabled { + // TODO(Tushar): thread orgID here to evaluate correctly + if c.fl.BooleanOrEmpty(ctx, flagger.FeatureUseJSONBody, featuretypes.NewFlaggerEvaluationContext(valuer.UUID{})) { return condition, nil } } diff --git a/pkg/telemetrylogs/condition_builder_test.go b/pkg/telemetrylogs/condition_builder_test.go index f430d0d2cd..9b7ab55727 100644 --- 
a/pkg/telemetrylogs/condition_builder_test.go +++ b/pkg/telemetrylogs/condition_builder_test.go @@ -5,6 +5,7 @@ import ( "testing" "time" + "github.com/SigNoz/signoz/pkg/flagger/flaggertest" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" "github.com/huandu/go-sqlbuilder" @@ -122,8 +123,9 @@ func TestExistsConditionForWithEvolutions(t *testing.T) { expectedError: nil, }, } - fm := NewFieldMapper() - conditionBuilder := NewConditionBuilder(fm) + fl := flaggertest.New(t) + fm := NewFieldMapper(fl) + conditionBuilder := NewConditionBuilder(fm, fl) ctx := context.Background() for _, tc := range testCases { @@ -513,8 +515,9 @@ func TestConditionFor(t *testing.T) { expectedError: qbtypes.ErrColumnNotFound, }, } - fm := NewFieldMapper() - conditionBuilder := NewConditionBuilder(fm) + fl := flaggertest.New(t) + fm := NewFieldMapper(fl) + conditionBuilder := NewConditionBuilder(fm, fl) for _, tc := range testCases { sb := sqlbuilder.NewSelectBuilder() t.Run(tc.name, func(t *testing.T) { @@ -566,8 +569,9 @@ func TestConditionForMultipleKeys(t *testing.T) { }, } - fm := NewFieldMapper() - conditionBuilder := NewConditionBuilder(fm) + fl := flaggertest.New(t) + fm := NewFieldMapper(fl) + conditionBuilder := NewConditionBuilder(fm, fl) for _, tc := range testCases { sb := sqlbuilder.NewSelectBuilder() @@ -825,8 +829,9 @@ func TestConditionForJSONBodySearch(t *testing.T) { }, } - fm := NewFieldMapper() - conditionBuilder := NewConditionBuilder(fm) + fl := flaggertest.New(t) + fm := NewFieldMapper(fl) + conditionBuilder := NewConditionBuilder(fm, fl) for _, tc := range testCases { sb := sqlbuilder.NewSelectBuilder() diff --git a/pkg/telemetrylogs/const.go b/pkg/telemetrylogs/const.go index 8340fc1c02..24f7b32eb7 100644 --- a/pkg/telemetrylogs/const.go +++ b/pkg/telemetrylogs/const.go @@ -4,7 +4,6 @@ import ( "fmt" "github.com/SigNoz/signoz-otel-collector/constants" - 
"github.com/SigNoz/signoz/pkg/querybuilder" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" ) @@ -41,7 +40,7 @@ const ( BodyPromotedColumnPrefix = constants.BodyPromotedColumnPrefix // messageSubColumn is the ClickHouse sub-column that body searches map to - // when BodyJSONQueryEnabled is true. + // when use_json_body feature flag is true. messageSubField = "message" messageSubColumn = "body_v2.message" bodySearchDefaultWarning = "body searches default to `body.message:string`. Use `body.` to search a different field inside body" @@ -128,8 +127,8 @@ var ( } ) -func bodyAliasExpression() string { - if !querybuilder.BodyJSONQueryEnabled { +func bodyAliasExpression(bodyJSONEnabled bool) string { + if !bodyJSONEnabled { return LogsV2BodyColumn } diff --git a/pkg/telemetrylogs/field_mapper.go b/pkg/telemetrylogs/field_mapper.go index 6a04c46617..843576b32b 100644 --- a/pkg/telemetrylogs/field_mapper.go +++ b/pkg/telemetrylogs/field_mapper.go @@ -12,9 +12,11 @@ import ( schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator" "github.com/SigNoz/signoz-otel-collector/utils" "github.com/SigNoz/signoz/pkg/errors" - "github.com/SigNoz/signoz/pkg/querybuilder" + "github.com/SigNoz/signoz/pkg/flagger" + "github.com/SigNoz/signoz/pkg/types/featuretypes" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "github.com/SigNoz/signoz/pkg/valuer" "github.com/huandu/go-sqlbuilder" "golang.org/x/exp/maps" @@ -66,13 +68,15 @@ var ( } ) -type fieldMapper struct{} - -func NewFieldMapper() qbtypes.FieldMapper { - return &fieldMapper{} +type fieldMapper struct { + fl flagger.Flagger } -func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.TelemetryFieldKey) ([]*schema.Column, error) { +func NewFieldMapper(fl flagger.Flagger) qbtypes.FieldMapper { + return 
&fieldMapper{fl: fl} +} + +func (m *fieldMapper) getColumn(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) ([]*schema.Column, error) { switch key.FieldContext { case telemetrytypes.FieldContextResource: columns := []*schema.Column{logsV2Columns["resources_string"], logsV2Columns["resource"]} @@ -96,7 +100,8 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry } case telemetrytypes.FieldContextBody: // Body context is for JSON body fields. Use body_v2 if feature flag is enabled. - if querybuilder.BodyJSONQueryEnabled { + // TODO(Tushar): thread orgID here to evaluate correctly + if m.fl.BooleanOrEmpty(ctx, flagger.FeatureUseJSONBody, featuretypes.NewFlaggerEvaluationContext(valuer.UUID{})) { if key.Name == messageSubField { return []*schema.Column{logsV2Columns[messageSubColumn]}, nil } @@ -105,7 +110,8 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry // Fall back to legacy body column return []*schema.Column{logsV2Columns["body"]}, nil case telemetrytypes.FieldContextLog, telemetrytypes.FieldContextUnspecified: - if key.Name == LogsV2BodyColumn && querybuilder.BodyJSONQueryEnabled { + // TODO(Tushar): thread orgID here to evaluate correctly + if key.Name == LogsV2BodyColumn && m.fl.BooleanOrEmpty(ctx, flagger.FeatureUseJSONBody, featuretypes.NewFlaggerEvaluationContext(valuer.UUID{})) { return []*schema.Column{logsV2Columns[messageSubColumn]}, nil } col, ok := logsV2Columns[key.Name] @@ -113,7 +119,8 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry // check if the key has body JSON search if strings.HasPrefix(key.Name, telemetrytypes.BodyJSONStringSearchPrefix) { // Use body_v2 if feature flag is enabled and we have a body condition builder - if querybuilder.BodyJSONQueryEnabled { + // TODO(Tushar): thread orgID here to evaluate correctly + if m.fl.BooleanOrEmpty(ctx, flagger.FeatureUseJSONBody, featuretypes.NewFlaggerEvaluationContext(valuer.UUID{})) { 
// TODO(Piyush): Update this to support multiple JSON columns based on evolutions // i.e return both the body json and body json promoted and let the evolutions decide which one to use // based on the query range time. diff --git a/pkg/telemetrylogs/field_mapper_test.go b/pkg/telemetrylogs/field_mapper_test.go index 896869855e..788f53be46 100644 --- a/pkg/telemetrylogs/field_mapper_test.go +++ b/pkg/telemetrylogs/field_mapper_test.go @@ -6,6 +6,7 @@ import ( "time" schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator" + "github.com/SigNoz/signoz/pkg/flagger/flaggertest" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" "github.com/stretchr/testify/assert" @@ -165,7 +166,8 @@ func TestGetColumn(t *testing.T) { }, } - fm := NewFieldMapper() + fl := flaggertest.New(t) + fm := NewFieldMapper(fl) for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { @@ -273,7 +275,8 @@ func TestGetFieldKeyName(t *testing.T) { for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { - fm := NewFieldMapper() + fl := flaggertest.New(t) + fm := NewFieldMapper(fl) result, err := fm.FieldFor(ctx, 0, 0, &tc.key) if tc.expectedError != nil { @@ -514,7 +517,8 @@ func TestFieldForWithEvolutions(t *testing.T) { for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { - fm := NewFieldMapper() + fl := flaggertest.New(t) + fm := NewFieldMapper(fl) tsStart := uint64(tc.tsStartTime.UnixNano()) tsEnd := uint64(tc.tsEndTime.UnixNano()) @@ -963,7 +967,8 @@ func TestFieldForWithMaterialized(t *testing.T) { }, } - fm := NewFieldMapper() + fl := flaggertest.New(t) + fm := NewFieldMapper(fl) for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { diff --git a/pkg/telemetrylogs/filter_expr_like_warning_test.go b/pkg/telemetrylogs/filter_expr_like_warning_test.go index 4db0d1dc96..628f6535f0 100644 --- 
a/pkg/telemetrylogs/filter_expr_like_warning_test.go +++ b/pkg/telemetrylogs/filter_expr_like_warning_test.go @@ -5,6 +5,7 @@ import ( "testing" "time" + "github.com/SigNoz/signoz/pkg/flagger/flaggertest" "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" "github.com/SigNoz/signoz/pkg/querybuilder" "github.com/stretchr/testify/require" @@ -12,9 +13,10 @@ import ( // TestLikeAndILikeWithoutWildcards_Warns Tests that LIKE/ILIKE without wildcards add warnings and include docs URL. func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) { + fl := flaggertest.New(t) ctx := context.Background() - fm := NewFieldMapper() - cb := NewConditionBuilder(fm) + fm := NewFieldMapper(fl) + cb := NewConditionBuilder(fm, fl) releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC) keys := buildCompleteFieldKeyMap(releaseTime) @@ -51,8 +53,9 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) { // TestLikeAndILikeWithWildcards_NoWarn Tests that LIKE/ILIKE with wildcards do not add warnings. func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) { - fm := NewFieldMapper() - cb := NewConditionBuilder(fm) + fl := flaggertest.New(t) + fm := NewFieldMapper(fl) + cb := NewConditionBuilder(fm, fl) releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC) keys := buildCompleteFieldKeyMap(releaseTime) diff --git a/pkg/telemetrylogs/filter_expr_logs_body_json_test.go b/pkg/telemetrylogs/filter_expr_logs_body_json_test.go index 9b2f3e21b3..59ef12f63f 100644 --- a/pkg/telemetrylogs/filter_expr_logs_body_json_test.go +++ b/pkg/telemetrylogs/filter_expr_logs_body_json_test.go @@ -6,6 +6,7 @@ import ( "testing" "time" + "github.com/SigNoz/signoz/pkg/flagger/flaggertest" "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" "github.com/SigNoz/signoz/pkg/querybuilder" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" @@ -15,8 +16,9 @@ import ( // TestFilterExprLogsBodyJSON tests a comprehensive set of query patterns for body JSON search. 
func TestFilterExprLogsBodyJSON(t *testing.T) { - fm := NewFieldMapper() - cb := NewConditionBuilder(fm) + fl := flaggertest.New(t) + fm := NewFieldMapper(fl) + cb := NewConditionBuilder(fm, fl) // Define a comprehensive set of field keys to support all test cases releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC) keys := buildCompleteFieldKeyMap(releaseTime) @@ -27,10 +29,8 @@ func TestFilterExprLogsBodyJSON(t *testing.T) { FieldMapper: fm, ConditionBuilder: cb, FieldKeys: keys, - FullTextColumn: &telemetrytypes.TelemetryFieldKey{ - Name: "body", - }, - JsonKeyToKey: GetBodyJSONKey, + FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "body"}, + JsonKeyToKey: GetBodyJSONKey, } testCases := []struct { diff --git a/pkg/telemetrylogs/filter_expr_logs_test.go b/pkg/telemetrylogs/filter_expr_logs_test.go index fe1a2321a9..cc204e07da 100644 --- a/pkg/telemetrylogs/filter_expr_logs_test.go +++ b/pkg/telemetrylogs/filter_expr_logs_test.go @@ -8,6 +8,7 @@ import ( "time" "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/flagger/flaggertest" "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" "github.com/SigNoz/signoz/pkg/querybuilder" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" @@ -17,10 +18,11 @@ import ( // TestFilterExprLogs tests a comprehensive set of query patterns for logs search. func TestFilterExprLogs(t *testing.T) { + fl := flaggertest.New(t) releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC) ctx := context.Background() - fm := NewFieldMapper() - cb := NewConditionBuilder(fm) + fm := NewFieldMapper(fl) + cb := NewConditionBuilder(fm, fl) // Define a comprehensive set of field keys to support all test cases keys := buildCompleteFieldKeyMap(releaseTime) @@ -2429,8 +2431,9 @@ func TestFilterExprLogs(t *testing.T) { // TestFilterExprLogs tests a comprehensive set of query patterns for logs search. 
func TestFilterExprLogsConflictNegation(t *testing.T) { - fm := NewFieldMapper() - cb := NewConditionBuilder(fm) + fl := flaggertest.New(t) + fm := NewFieldMapper(fl) + cb := NewConditionBuilder(fm, fl) // Define a comprehensive set of field keys to support all test cases releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC) diff --git a/pkg/telemetrylogs/json_stmt_builder_test.go b/pkg/telemetrylogs/json_stmt_builder_test.go index 739f4841d4..b9f7d56eb8 100644 --- a/pkg/telemetrylogs/json_stmt_builder_test.go +++ b/pkg/telemetrylogs/json_stmt_builder_test.go @@ -8,6 +8,7 @@ import ( "time" schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator" + "github.com/SigNoz/signoz/pkg/flagger/flaggertest" "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" "github.com/SigNoz/signoz/pkg/querybuilder" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" @@ -32,9 +33,6 @@ func (t TestExpected) GetQuery() string { } func TestJSONStmtBuilder_TimeSeries(t *testing.T) { - enable, disable := jsonQueryTestUtil(t) - enable() - defer disable() statementBuilder := buildJSONTestStatementBuilder(t, false) cases := []struct { @@ -115,9 +113,6 @@ func TestJSONStmtBuilder_TimeSeries(t *testing.T) { not a body_promoted.* column. These tests assumed the old coalesce(body_promoted.x, body_v2.x) path. 
func TestStmtBuilderTimeSeriesBodyGroupByPromoted(t *testing.T) { - enable, disable := jsonQueryTestUtil(t) - enable() - defer disable() statementBuilder := buildJSONTestStatementBuilder(t, "user.age", "user.name") cases := []struct { @@ -176,10 +171,6 @@ func TestStmtBuilderTimeSeriesBodyGroupByPromoted(t *testing.T) { */ func TestJSONStmtBuilder_PrimitivePaths(t *testing.T) { - enable, disable := jsonQueryTestUtil(t) - enable() - defer disable() - statementBuilder := buildJSONTestStatementBuilder(t, false) cases := []struct { name string @@ -340,10 +331,6 @@ func TestJSONStmtBuilder_PrimitivePaths(t *testing.T) { (direct sub-column access), not a body_promoted.* column. func TestStatementBuilderListQueryBodyPromoted(t *testing.T) { - enable, disable := jsonQueryTestUtil(t) - enable() - defer disable() - statementBuilder := buildJSONTestStatementBuilder(t, "education", "tags") cases := []struct { name string @@ -507,10 +494,6 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) { */ func TestJSONStmtBuilder_ArrayPaths(t *testing.T) { - enable, disable := jsonQueryTestUtil(t) - enable() - defer disable() - statementBuilder := buildJSONTestStatementBuilder(t, false) cases := []struct { name string @@ -816,10 +799,6 @@ func TestJSONStmtBuilder_ArrayPaths(t *testing.T) { } func TestJSONStmtBuilder_IndexedPaths(t *testing.T) { - enable, disable := jsonQueryTestUtil(t) - enable() - defer disable() - statementBuilder := buildJSONTestStatementBuilder(t, true) cases := []struct { name string @@ -939,9 +918,6 @@ func TestJSONStmtBuilder_IndexedPaths(t *testing.T) { } func TestJSONStmtBuilder_SelectField(t *testing.T) { - enable, disable := jsonQueryTestUtil(t) - enable() - defer disable() statementBuilder := buildJSONTestStatementBuilder(t, false) cases := []struct { @@ -1030,9 +1006,6 @@ func TestJSONStmtBuilder_SelectField(t *testing.T) { } func TestJSONStmtBuilder_OrderBy(t *testing.T) { - enable, disable := jsonQueryTestUtil(t) - enable() - defer disable() 
statementBuilder := buildJSONTestStatementBuilder(t, false) cases := []struct { @@ -1151,11 +1124,14 @@ func buildTestTelemetryMetadataStore(t *testing.T, addIndexes bool) *telemetryty } func buildJSONTestStatementBuilder(t *testing.T, addIndexes bool) *logQueryStatementBuilder { - mockMetadataStore := buildTestTelemetryMetadataStore(t, addIndexes) - fm := NewFieldMapper() - cb := NewConditionBuilder(fm) + t.Helper() - aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil) + mockMetadataStore := buildTestTelemetryMetadataStore(t, addIndexes) + fl := flaggertest.WithUseJSONBody(t, true) + fm := NewFieldMapper(fl) + cb := NewConditionBuilder(fm, fl) + + aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil, fl) statementBuilder := NewLogQueryStatementBuilder( instrumentationtest.New().ToProviderSettings(), @@ -1165,18 +1141,8 @@ func buildJSONTestStatementBuilder(t *testing.T, addIndexes bool) *logQueryState aggExprRewriter, DefaultFullTextColumn, GetBodyJSONKey, + fl, ) return statementBuilder } - -func jsonQueryTestUtil(_ *testing.T) (func(), func()) { - enable := func() { - querybuilder.BodyJSONQueryEnabled = true - } - disable := func() { - querybuilder.BodyJSONQueryEnabled = false - } - - return enable, disable -} diff --git a/pkg/telemetrylogs/statement_builder.go b/pkg/telemetrylogs/statement_builder.go index 21853d1280..a4fb92e4ca 100644 --- a/pkg/telemetrylogs/statement_builder.go +++ b/pkg/telemetrylogs/statement_builder.go @@ -8,10 +8,13 @@ import ( "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/flagger" "github.com/SigNoz/signoz/pkg/querybuilder" "github.com/SigNoz/signoz/pkg/telemetryresourcefilter" + "github.com/SigNoz/signoz/pkg/types/featuretypes" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" 
"github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "github.com/SigNoz/signoz/pkg/valuer" "github.com/huandu/go-sqlbuilder" ) @@ -22,6 +25,7 @@ type logQueryStatementBuilder struct { cb qbtypes.ConditionBuilder resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation] aggExprRewriter qbtypes.AggExprRewriter + fl flagger.Flagger fullTextColumn *telemetrytypes.TelemetryFieldKey jsonKeyToKey qbtypes.JsonKeyToFieldFunc @@ -37,6 +41,7 @@ func NewLogQueryStatementBuilder( aggExprRewriter qbtypes.AggExprRewriter, fullTextColumn *telemetrytypes.TelemetryFieldKey, jsonKeyToKey qbtypes.JsonKeyToFieldFunc, + fl flagger.Flagger, ) *logQueryStatementBuilder { logsSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetrylogs") @@ -49,6 +54,7 @@ func NewLogQueryStatementBuilder( metadataStore, fullTextColumn, jsonKeyToKey, + fl, ) return &logQueryStatementBuilder{ @@ -58,6 +64,7 @@ func NewLogQueryStatementBuilder( cb: conditionBuilder, resourceFilterStmtBuilder: resourceFilterStmtBuilder, aggExprRewriter: aggExprRewriter, + fl: fl, fullTextColumn: fullTextColumn, jsonKeyToKey: jsonKeyToKey, } @@ -75,8 +82,10 @@ func (b *logQueryStatementBuilder) Build( start = querybuilder.ToNanoSecs(start) end = querybuilder.ToNanoSecs(end) + // TODO(Tushar): thread orgID here to evaluate correctly + bodyJSONEnabled := b.fl.BooleanOrEmpty(ctx, flagger.FeatureUseJSONBody, featuretypes.NewFlaggerEvaluationContext(valuer.UUID{})) - keySelectors, warnings := getKeySelectors(query) + keySelectors, warnings := getKeySelectors(query, bodyJSONEnabled) keys, _, err := b.metadataStore.GetKeysMulti(ctx, keySelectors) if err != nil { return nil, err @@ -107,7 +116,7 @@ func (b *logQueryStatementBuilder) Build( return stmt, nil } -func getKeySelectors(query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]) ([]*telemetrytypes.FieldKeySelector, []string) { +func getKeySelectors(query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation], bodyJSONEnabled 
bool) ([]*telemetrytypes.FieldKeySelector, []string) { var keySelectors []*telemetrytypes.FieldKeySelector var warnings []string @@ -159,7 +168,7 @@ func getKeySelectors(query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]) ([ // When the new JSON body experience is enabled, warn the user if they use the bare // "body" key in the filter — queries on plain "body" default to body.message:string. // TODO(Piyush): Setup better for coming FTS support. - if querybuilder.BodyJSONQueryEnabled { + if bodyJSONEnabled { for _, sel := range keySelectors { if sel.Name == LogsV2BodyColumn { warnings = append(warnings, bodySearchDefaultWarning) @@ -258,6 +267,8 @@ func (b *logQueryStatementBuilder) buildListQuery( var ( cteFragments []string cteArgs [][]any + // TODO(Tushar): thread orgID here to evaluate correctly + bodyJSONEnabled = b.fl.BooleanOrEmpty(ctx, flagger.FeatureUseJSONBody, featuretypes.NewFlaggerEvaluationContext(valuer.UUID{})) ) if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end, variables); err != nil { @@ -279,7 +290,7 @@ func (b *logQueryStatementBuilder) buildListQuery( sb.SelectMore(LogsV2SeverityNumberColumn) sb.SelectMore(LogsV2ScopeNameColumn) sb.SelectMore(LogsV2ScopeVersionColumn) - sb.SelectMore(bodyAliasExpression()) + sb.SelectMore(bodyAliasExpression(bodyJSONEnabled)) sb.SelectMore(LogsV2AttributesStringColumn) sb.SelectMore(LogsV2AttributesNumberColumn) sb.SelectMore(LogsV2AttributesBoolColumn) @@ -360,6 +371,8 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery( var ( cteFragments []string cteArgs [][]any + // TODO(Tushar): thread orgID here to evaluate correctly + bodyJSONEnabled = b.fl.BooleanOrEmpty(ctx, flagger.FeatureUseJSONBody, featuretypes.NewFlaggerEvaluationContext(valuer.UUID{})) ) if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end, variables); err != nil { @@ -379,7 +392,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery( // Keep original column expressions so we 
can build the tuple fieldNames := make([]string, 0, len(query.GroupBy)) for _, gb := range query.GroupBy { - expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey) + expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey, bodyJSONEnabled) if err != nil { return nil, err } @@ -518,6 +531,8 @@ func (b *logQueryStatementBuilder) buildScalarQuery( var ( cteFragments []string cteArgs [][]any + // TODO(Tushar): thread orgID here to evaluate correctly + bodyJSONEnabled = b.fl.BooleanOrEmpty(ctx, flagger.FeatureUseJSONBody, featuretypes.NewFlaggerEvaluationContext(valuer.UUID{})) ) if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end, variables); err != nil { @@ -532,7 +547,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery( var allGroupByArgs []any for _, gb := range query.GroupBy { - expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey) + expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey, bodyJSONEnabled) if err != nil { return nil, err } @@ -635,6 +650,8 @@ func (b *logQueryStatementBuilder) addFilterCondition( var preparedWhereClause *querybuilder.PreparedWhereClause var err error + // TODO(Tushar): thread orgID here to evaluate correctly + bodyJSONEnabled := b.fl.BooleanOrEmpty(ctx, flagger.FeatureUseJSONBody, featuretypes.NewFlaggerEvaluationContext(valuer.UUID{})) if query.Filter != nil && query.Filter.Expression != "" { // add filter expression @@ -644,6 +661,7 @@ func (b *logQueryStatementBuilder) addFilterCondition( FieldMapper: b.fm, ConditionBuilder: b.cb, FieldKeys: keys, + 
BodyJSONEnabled: bodyJSONEnabled, SkipResourceFilter: true, FullTextColumn: b.fullTextColumn, JsonKeyToKey: b.jsonKeyToKey, diff --git a/pkg/telemetrylogs/stmt_builder_test.go b/pkg/telemetrylogs/stmt_builder_test.go index e394cca6f0..480bb21de7 100644 --- a/pkg/telemetrylogs/stmt_builder_test.go +++ b/pkg/telemetrylogs/stmt_builder_test.go @@ -6,6 +6,7 @@ import ( "time" "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/flagger/flaggertest" "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" "github.com/SigNoz/signoz/pkg/querybuilder" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" @@ -15,7 +16,6 @@ import ( ) func TestStatementBuilderTimeSeries(t *testing.T) { - // Create a test release time releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC) releaseTimeNano := uint64(releaseTime.UnixNano()) @@ -191,16 +191,17 @@ func TestStatementBuilderTimeSeries(t *testing.T) { } ctx := context.Background() + fl := flaggertest.New(t) mockMetadataStore := telemetrytypestest.NewMockMetadataStore() keysMap := buildCompleteFieldKeyMap(releaseTime) mockMetadataStore.KeysMap = keysMap - fm := NewFieldMapper() - cb := NewConditionBuilder(fm) + fm := NewFieldMapper(fl) + cb := NewConditionBuilder(fm, fl) - aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil) + aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil, fl) statementBuilder := NewLogQueryStatementBuilder( instrumentationtest.New().ToProviderSettings(), @@ -210,6 +211,7 @@ func TestStatementBuilderTimeSeries(t *testing.T) { aggExprRewriter, DefaultFullTextColumn, GetBodyJSONKey, + fl, ) for _, c := range cases { @@ -313,15 +315,16 @@ func TestStatementBuilderListQuery(t *testing.T) { } ctx := context.Background() + fl := flaggertest.New(t) mockMetadataStore := telemetrytypestest.NewMockMetadataStore() - fm := 
NewFieldMapper() + fm := NewFieldMapper(fl) // Create a test release time releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC) mockMetadataStore.KeysMap = buildCompleteFieldKeyMap(releaseTime) - cb := NewConditionBuilder(fm) + cb := NewConditionBuilder(fm, fl) - aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil) + aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil, fl) statementBuilder := NewLogQueryStatementBuilder( instrumentationtest.New().ToProviderSettings(), @@ -331,6 +334,7 @@ func TestStatementBuilderListQuery(t *testing.T) { aggExprRewriter, DefaultFullTextColumn, GetBodyJSONKey, + fl, ) for _, c := range cases { @@ -454,14 +458,15 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) { } ctx := context.Background() + fl := flaggertest.New(t) mockMetadataStore := telemetrytypestest.NewMockMetadataStore() - fm := NewFieldMapper() + fm := NewFieldMapper(fl) // Create a test release time releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC) mockMetadataStore.KeysMap = buildCompleteFieldKeyMap(releaseTime) - cb := NewConditionBuilder(fm) + cb := NewConditionBuilder(fm, fl) - aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil) + aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil, fl) statementBuilder := NewLogQueryStatementBuilder( instrumentationtest.New().ToProviderSettings(), @@ -471,6 +476,7 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) { aggExprRewriter, DefaultFullTextColumn, GetBodyJSONKey, + fl, ) for _, c := range cases { @@ -528,14 +534,15 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) { } ctx := context.Background() + fl := flaggertest.New(t) mockMetadataStore := telemetrytypestest.NewMockMetadataStore() - fm := NewFieldMapper() + fm 
:= NewFieldMapper(fl) // Create a test release time releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC) mockMetadataStore.KeysMap = buildCompleteFieldKeyMap(releaseTime) - cb := NewConditionBuilder(fm) + cb := NewConditionBuilder(fm, fl) - aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil) + aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil, fl) statementBuilder := NewLogQueryStatementBuilder( instrumentationtest.New().ToProviderSettings(), @@ -545,6 +552,7 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) { aggExprRewriter, DefaultFullTextColumn, GetBodyJSONKey, + fl, ) for _, c := range cases { @@ -624,11 +632,12 @@ func TestStatementBuilderListQueryServiceCollision(t *testing.T) { ctx := context.Background() mockMetadataStore := telemetrytypestest.NewMockMetadataStore() - fm := NewFieldMapper() + fl := flaggertest.New(t) + fm := NewFieldMapper(fl) mockMetadataStore.KeysMap = buildCompleteFieldKeyMapCollision() - cb := NewConditionBuilder(fm) + cb := NewConditionBuilder(fm, fl) - aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil) + aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil, fl) statementBuilder := NewLogQueryStatementBuilder( instrumentationtest.New().ToProviderSettings(), @@ -638,6 +647,7 @@ func TestStatementBuilderListQueryServiceCollision(t *testing.T) { aggExprRewriter, DefaultFullTextColumn, GetBodyJSONKey, + fl, ) for _, c := range cases { @@ -845,12 +855,13 @@ func TestAdjustKey(t *testing.T) { }, } - fm := NewFieldMapper() + fl := flaggertest.New(t) + fm := NewFieldMapper(fl) mockMetadataStore := telemetrytypestest.NewMockMetadataStore() mockMetadataStore.KeysMap = buildCompleteFieldKeyMapCollision() - cb := NewConditionBuilder(fm) + cb := NewConditionBuilder(fm, 
fl) - aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil) + aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil, fl) statementBuilder := NewLogQueryStatementBuilder( instrumentationtest.New().ToProviderSettings(), @@ -860,6 +871,7 @@ func TestAdjustKey(t *testing.T) { aggExprRewriter, DefaultFullTextColumn, GetBodyJSONKey, + fl, ) for _, c := range cases { @@ -885,7 +897,7 @@ func TestStmtBuilderBodyField(t *testing.T) { name string requestType qbtypes.RequestType query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation] - enableBodyJSONQuery bool + enableUseJSONBody bool expected qbtypes.Statement expectedErr error }{ @@ -897,7 +909,7 @@ func TestStmtBuilderBodyField(t *testing.T) { Filter: &qbtypes.Filter{Expression: "body Exists"}, Limit: 10, }, - enableBodyJSONQuery: true, + enableUseJSONBody: true, expected: qbtypes.Statement{ Query: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE body_v2.message <> ? AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?", Args: []any{"", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10}, @@ -913,7 +925,7 @@ func TestStmtBuilderBodyField(t *testing.T) { Filter: &qbtypes.Filter{Expression: "body Exists"}, Limit: 10, }, - enableBodyJSONQuery: false, + enableUseJSONBody: false, expected: qbtypes.Statement{ Query: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE body <> ? AND timestamp >= ? AND ts_bucket_start >= ? 
AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?", Args: []any{"", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10}, @@ -928,7 +940,7 @@ func TestStmtBuilderBodyField(t *testing.T) { Filter: &qbtypes.Filter{Expression: "body == ''"}, Limit: 10, }, - enableBodyJSONQuery: true, + enableUseJSONBody: true, expected: qbtypes.Statement{ Query: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE body_v2.message = ? AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?", Args: []any{"", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10}, @@ -944,7 +956,7 @@ func TestStmtBuilderBodyField(t *testing.T) { Filter: &qbtypes.Filter{Expression: "body == ''"}, Limit: 10, }, - enableBodyJSONQuery: false, + enableUseJSONBody: false, expected: qbtypes.Statement{ Query: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE body = ? AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? 
LIMIT ?", Args: []any{"", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10}, @@ -959,7 +971,7 @@ func TestStmtBuilderBodyField(t *testing.T) { Filter: &qbtypes.Filter{Expression: "body CONTAINS 'error'"}, Limit: 10, }, - enableBodyJSONQuery: true, + enableUseJSONBody: true, expected: qbtypes.Statement{ Query: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE LOWER(body_v2.message) LIKE LOWER(?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?", Args: []any{"%error%", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10}, @@ -975,7 +987,7 @@ func TestStmtBuilderBodyField(t *testing.T) { Filter: &qbtypes.Filter{Expression: "body CONTAINS 'error'"}, Limit: 10, }, - enableBodyJSONQuery: false, + enableUseJSONBody: false, expected: qbtypes.Statement{ Query: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE LOWER(body) LIKE LOWER(?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? 
LIMIT ?", Args: []any{"%error%", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10}, @@ -984,25 +996,18 @@ func TestStmtBuilderBodyField(t *testing.T) { }, } - fm := NewFieldMapper() - cb := NewConditionBuilder(fm) - - enable, disable := jsonQueryTestUtil(t) - defer disable() for _, c := range cases { t.Run(c.name, func(t *testing.T) { - if c.enableBodyJSONQuery { - enable() - } else { - disable() - } - // build the key map after enabling/disabling body JSON query + fl := flaggertest.WithUseJSONBody(t, c.enableUseJSONBody) + fm := NewFieldMapper(fl) + cb := NewConditionBuilder(fm, fl) + // build the key map mockMetadataStore := telemetrytypestest.NewMockMetadataStore() for _, field := range IntrinsicFields { f := field mockMetadataStore.KeysMap[field.Name] = append(mockMetadataStore.KeysMap[field.Name], &f) } - aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil) + aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil, fl) statementBuilder := NewLogQueryStatementBuilder( instrumentationtest.New().ToProviderSettings(), mockMetadataStore, @@ -1011,6 +1016,7 @@ func TestStmtBuilderBodyField(t *testing.T) { aggExprRewriter, DefaultFullTextColumn, GetBodyJSONKey, + fl, ) q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil) @@ -1036,7 +1042,7 @@ func TestStmtBuilderBodyFullTextSearch(t *testing.T) { name string requestType qbtypes.RequestType query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation] - enableBodyJSONQuery bool + enableUseJSONBody bool expected qbtypes.Statement expectedErr error }{ @@ -1048,7 +1054,7 @@ func TestStmtBuilderBodyFullTextSearch(t *testing.T) { Filter: &qbtypes.Filter{Expression: "'error'"}, Limit: 10, }, - enableBodyJSONQuery: true, + enableUseJSONBody: true, expected: qbtypes.Statement{ Query: "SELECT timestamp, id, 
trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE match(LOWER(body_v2.message), LOWER(?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?", Args: []any{"error", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10}, @@ -1063,7 +1069,7 @@ func TestStmtBuilderBodyFullTextSearch(t *testing.T) { Filter: &qbtypes.Filter{Expression: "'error'"}, Limit: 10, }, - enableBodyJSONQuery: false, + enableUseJSONBody: false, expected: qbtypes.Statement{ Query: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE match(LOWER(body), LOWER(?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? 
LIMIT ?", Args: []any{"error", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10}, @@ -1072,25 +1078,18 @@ func TestStmtBuilderBodyFullTextSearch(t *testing.T) { }, } - fm := NewFieldMapper() - cb := NewConditionBuilder(fm) - - enable, disable := jsonQueryTestUtil(t) - defer disable() for _, c := range cases { t.Run(c.name, func(t *testing.T) { - if c.enableBodyJSONQuery { - enable() - } else { - disable() - } - // build the key map after enabling/disabling body JSON query + fl := flaggertest.WithUseJSONBody(t, c.enableUseJSONBody) + fm := NewFieldMapper(fl) + cb := NewConditionBuilder(fm, fl) + // build the key map mockMetadataStore := telemetrytypestest.NewMockMetadataStore() for _, field := range IntrinsicFields { f := field mockMetadataStore.KeysMap[field.Name] = append(mockMetadataStore.KeysMap[field.Name], &f) } - aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil) + aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil, fl) statementBuilder := NewLogQueryStatementBuilder( instrumentationtest.New().ToProviderSettings(), mockMetadataStore, @@ -1099,6 +1098,7 @@ func TestStmtBuilderBodyFullTextSearch(t *testing.T) { aggExprRewriter, DefaultFullTextColumn, GetBodyJSONKey, + fl, ) q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil) diff --git a/pkg/telemetrymetadata/metadata.go b/pkg/telemetrymetadata/metadata.go index a2041d1909..fdc35b7c25 100644 --- a/pkg/telemetrymetadata/metadata.go +++ b/pkg/telemetrymetadata/metadata.go @@ -12,6 +12,7 @@ import ( "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/flagger" "github.com/SigNoz/signoz/pkg/querybuilder" "github.com/SigNoz/signoz/pkg/telemetryaudit" "github.com/SigNoz/signoz/pkg/telemetrylogs" @@ -19,10 +20,12 @@ import ( 
"github.com/SigNoz/signoz/pkg/telemetrystore" "github.com/SigNoz/signoz/pkg/telemetrytraces" "github.com/SigNoz/signoz/pkg/types/ctxtypes" + "github.com/SigNoz/signoz/pkg/types/featuretypes" "github.com/SigNoz/signoz/pkg/types/instrumentationtypes" "github.com/SigNoz/signoz/pkg/types/metrictypes" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "github.com/SigNoz/signoz/pkg/valuer" ) var ( @@ -63,6 +66,7 @@ type telemetryMetaStore struct { fm qbtypes.FieldMapper conditionBuilder qbtypes.ConditionBuilder + fl flagger.Flagger jsonColumnMetadata map[telemetrytypes.Signal]map[telemetrytypes.FieldContext]telemetrytypes.JSONColumnMetadata } @@ -94,9 +98,13 @@ func NewTelemetryMetaStore( relatedMetadataDBName string, relatedMetadataTblName string, columnEvolutionMetadataTblName string, + fl flagger.Flagger, ) telemetrytypes.MetadataStore { metadataSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetrymetadata") + fm := NewFieldMapper() + conditionBuilder := NewConditionBuilder(fm) + t := &telemetryMetaStore{ logger: metadataSettings.Logger(), telemetrystore: telemetrystore, @@ -129,14 +137,11 @@ func NewTelemetryMetaStore( }, }, }, + fl: fl, + fm: fm, + conditionBuilder: conditionBuilder, } - fm := NewFieldMapper() - conditionBuilder := NewConditionBuilder(fm) - - t.fm = fm - t.conditionBuilder = conditionBuilder - return t } @@ -416,7 +421,8 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors } // body keys are gated behind the feature flag - queryBodyTable = queryBodyTable && querybuilder.BodyJSONQueryEnabled + // TODO(Tushar): thread orgID here to evaluate correctly + queryBodyTable = queryBodyTable && t.fl.BooleanOrEmpty(ctx, flagger.FeatureUseJSONBody, featuretypes.NewFlaggerEvaluationContext(valuer.UUID{})) // requestedFieldKeySelectors is the set of names the user explicitly asked for. 
// Used to ensure a name that is both a parent path AND a directly requested field still surfaces @@ -676,7 +682,8 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors } // enrich body keys with promoted paths, indexes, and JSON access plans - if querybuilder.BodyJSONQueryEnabled { + // TODO(Tushar): thread orgID here to evaluate correctly + if t.fl.BooleanOrEmpty(ctx, flagger.FeatureUseJSONBody, featuretypes.NewFlaggerEvaluationContext(valuer.UUID{})) { if err := t.enrichJSONKeys(ctx, fieldKeySelectors, keys, parentTypes); err != nil { return nil, false, err } @@ -1360,6 +1367,9 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel instrumentationtypes.CodeFunctionName: "getRelatedValues", }) + // TODO(Tushar): thread orgID here to evaluate correctly + bodyJSONEnabled := t.fl.BooleanOrEmpty(ctx, flagger.FeatureUseJSONBody, featuretypes.NewFlaggerEvaluationContext(valuer.UUID{})) + // nothing to return as "related" value if there is nothing to filter on if fieldValueSelector.ExistingQuery == "" { return nil, true, nil @@ -1409,6 +1419,7 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel FieldMapper: t.fm, ConditionBuilder: t.conditionBuilder, FieldKeys: keys, + BodyJSONEnabled: bodyJSONEnabled, }) if err != nil { t.logger.WarnContext(ctx, "error parsing existing query for related values", errors.Attr(err)) diff --git a/pkg/telemetrymetadata/metadata_query_test.go b/pkg/telemetrymetadata/metadata_query_test.go index 308c2a689e..bf379e4e69 100644 --- a/pkg/telemetrymetadata/metadata_query_test.go +++ b/pkg/telemetrymetadata/metadata_query_test.go @@ -15,6 +15,7 @@ import ( "github.com/SigNoz/signoz/pkg/types/telemetrytypes" cmock "github.com/srikanthccv/ClickHouse-go-mock" "github.com/stretchr/testify/assert" + "github.com/SigNoz/signoz/pkg/flagger/flaggertest" "github.com/stretchr/testify/require" ) @@ -46,6 +47,7 @@ func TestGetFirstSeenFromMetricMetadata(t *testing.T) { 
DBName, AttributesMetadataLocalTableName, ColumnEvolutionMetadataTableName, + flaggertest.New(t), ) lookupKeys := []telemetrytypes.MetricMetadataLookupKey{ diff --git a/pkg/telemetrymetadata/metadata_test.go b/pkg/telemetrymetadata/metadata_test.go index 498a514706..9459cb3b2a 100644 --- a/pkg/telemetrymetadata/metadata_test.go +++ b/pkg/telemetrymetadata/metadata_test.go @@ -17,10 +17,12 @@ import ( "github.com/SigNoz/signoz/pkg/types/telemetrytypes" cmock "github.com/srikanthccv/ClickHouse-go-mock" "github.com/stretchr/testify/assert" + "github.com/SigNoz/signoz/pkg/flagger/flaggertest" "github.com/stretchr/testify/require" ) -func newTestTelemetryMetaStoreTestHelper(store telemetrystore.TelemetryStore) telemetrytypes.MetadataStore { +func newTestTelemetryMetaStoreTestHelper(t *testing.T, store telemetrystore.TelemetryStore) telemetrytypes.MetadataStore { + t.Helper() return NewTelemetryMetaStore( instrumentationtest.New().ToProviderSettings(), store, @@ -45,6 +47,7 @@ func newTestTelemetryMetaStoreTestHelper(store telemetrystore.TelemetryStore) te DBName, AttributesMetadataLocalTableName, ColumnEvolutionMetadataTableName, + flaggertest.New(t), ) } @@ -66,7 +69,7 @@ func TestGetKeys(t *testing.T) { mockTelemetryStore := telemetrystoretest.New(telemetrystore.Config{}, ®exMatcher{}) mock := mockTelemetryStore.Mock() - metadata := newTestTelemetryMetaStoreTestHelper(mockTelemetryStore) + metadata := newTestTelemetryMetaStoreTestHelper(t, mockTelemetryStore) rows := cmock.NewRows([]cmock.ColumnType{ {Name: "statement", Type: "String"}, @@ -176,7 +179,7 @@ func TestApplyBackwardCompatibleKeys(t *testing.T) { mockTelemetryStore := telemetrystoretest.New(telemetrystore.Config{}, ®exMatcher{}) mock := mockTelemetryStore.Mock() - metadata := newTestTelemetryMetaStoreTestHelper(mockTelemetryStore) + metadata := newTestTelemetryMetaStoreTestHelper(t, mockTelemetryStore) hasTraces := false hasLogs := false @@ -340,7 +343,7 @@ func 
TestGetMetricFieldValuesIntrinsicMetricName(t *testing.T) { mockTelemetryStore := telemetrystoretest.New(telemetrystore.Config{}, ®exMatcher{}) mock := mockTelemetryStore.Mock() - metadata := newTestTelemetryMetaStoreTestHelper(mockTelemetryStore) + metadata := newTestTelemetryMetaStoreTestHelper(t, mockTelemetryStore) valueRows := cmock.NewRows([]cmock.ColumnType{ {Name: "metric_name", Type: "String"}, @@ -379,7 +382,7 @@ func TestGetMetricFieldValuesIntrinsicBoolReturnsEmpty(t *testing.T) { mockTelemetryStore := telemetrystoretest.New(telemetrystore.Config{}, ®exMatcher{}) mock := mockTelemetryStore.Mock() - metadata := newTestTelemetryMetaStoreTestHelper(mockTelemetryStore) + metadata := newTestTelemetryMetaStoreTestHelper(t, mockTelemetryStore) metadataRows := cmock.NewRows([]cmock.ColumnType{ {Name: "attr_string_value", Type: "String"}, @@ -411,7 +414,7 @@ func TestGetMetricFieldValuesAppliesMetricNamespace(t *testing.T) { mockTelemetryStore := telemetrystoretest.New(telemetrystore.Config{}, ®exMatcher{}) mock := mockTelemetryStore.Mock() - metadata := newTestTelemetryMetaStoreTestHelper(mockTelemetryStore) + metadata := newTestTelemetryMetaStoreTestHelper(t, mockTelemetryStore) valueRows := cmock.NewRows([]cmock.ColumnType{ {Name: "attr_string_value", Type: "String"}, @@ -443,7 +446,7 @@ func TestGetMetricFieldValuesIntrinsicMetricNameAppliesMetricNamespace(t *testin mockTelemetryStore := telemetrystoretest.New(telemetrystore.Config{}, ®exMatcher{}) mock := mockTelemetryStore.Mock() - metadata := newTestTelemetryMetaStoreTestHelper(mockTelemetryStore) + metadata := newTestTelemetryMetaStoreTestHelper(t, mockTelemetryStore) valueRows := cmock.NewRows([]cmock.ColumnType{ {Name: "metric_name", Type: "String"}, @@ -483,7 +486,7 @@ func TestGetMeterSourceMetricFieldValuesAppliesMetricNamespace(t *testing.T) { mockTelemetryStore := telemetrystoretest.New(telemetrystore.Config{}, ®exMatcher{}) mock := mockTelemetryStore.Mock() - metadata := 
newTestTelemetryMetaStoreTestHelper(mockTelemetryStore) + metadata := newTestTelemetryMetaStoreTestHelper(t, mockTelemetryStore) rows := cmock.NewRows([]cmock.ColumnType{ {Name: "attr", Type: "Array(String)"}, @@ -514,7 +517,7 @@ func TestGetMetricsKeysAppliesMetricNamespace(t *testing.T) { mockTelemetryStore := telemetrystoretest.New(telemetrystore.Config{}, ®exMatcher{}) mock := mockTelemetryStore.Mock() - metadata := newTestTelemetryMetaStoreTestHelper(mockTelemetryStore) + metadata := newTestTelemetryMetaStoreTestHelper(t, mockTelemetryStore) rows := cmock.NewRows([]cmock.ColumnType{ {Name: "name", Type: "String"}, @@ -549,7 +552,7 @@ func TestGetMeterSourceMetricKeysAppliesMetricNamespace(t *testing.T) { mockTelemetryStore := telemetrystoretest.New(telemetrystore.Config{}, ®exMatcher{}) mock := mockTelemetryStore.Mock() - metadata := newTestTelemetryMetaStoreTestHelper(mockTelemetryStore) + metadata := newTestTelemetryMetaStoreTestHelper(t, mockTelemetryStore) rows := cmock.NewRows([]cmock.ColumnType{ {Name: "attr_name", Type: "String"}, diff --git a/pkg/telemetrymeter/stmt_builder_test.go b/pkg/telemetrymeter/stmt_builder_test.go index 2b2c39bd7d..b1569082b2 100644 --- a/pkg/telemetrymeter/stmt_builder_test.go +++ b/pkg/telemetrymeter/stmt_builder_test.go @@ -5,7 +5,7 @@ import ( "testing" "time" - "github.com/SigNoz/signoz/pkg/flagger" + "github.com/SigNoz/signoz/pkg/flagger/flaggertest" "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" "github.com/SigNoz/signoz/pkg/telemetrymetrics" "github.com/SigNoz/signoz/pkg/types/metrictypes" @@ -166,10 +166,7 @@ func TestStatementBuilder(t *testing.T) { } mockMetadataStore.KeysMap = keys - flagger, err := flagger.New(context.Background(), instrumentationtest.New().ToProviderSettings(), flagger.Config{}, flagger.MustNewRegistry()) - if err != nil { - t.Fatalf("failed to create flagger: %v", err) - } + flagger := flaggertest.New(t) metricStmtBuilder := 
telemetrymetrics.NewMetricQueryStatementBuilder(instrumentationtest.New().ToProviderSettings(), mockMetadataStore, fm, cb, flagger) diff --git a/pkg/telemetryresourcefilter/statement_builder.go b/pkg/telemetryresourcefilter/statement_builder.go index 29d92c96d2..92c8f08f29 100644 --- a/pkg/telemetryresourcefilter/statement_builder.go +++ b/pkg/telemetryresourcefilter/statement_builder.go @@ -6,9 +6,12 @@ import ( "log/slog" "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/flagger" "github.com/SigNoz/signoz/pkg/querybuilder" + "github.com/SigNoz/signoz/pkg/types/featuretypes" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "github.com/SigNoz/signoz/pkg/valuer" "github.com/huandu/go-sqlbuilder" ) @@ -22,6 +25,7 @@ type resourceFilterStatementBuilder[T any] struct { metadataStore telemetrytypes.MetadataStore signal telemetrytypes.Signal source telemetrytypes.Source + flagger flagger.Flagger fullTextColumn *telemetrytypes.TelemetryFieldKey jsonKeyToKey qbtypes.JsonKeyToFieldFunc @@ -42,6 +46,7 @@ func New[T any]( metadataStore telemetrytypes.MetadataStore, fullTextColumn *telemetrytypes.TelemetryFieldKey, jsonKeyToKey qbtypes.JsonKeyToFieldFunc, + fl flagger.Flagger, ) *resourceFilterStatementBuilder[T] { set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetryresourcefilter") fm := NewFieldMapper() @@ -55,6 +60,7 @@ func New[T any]( metadataStore: metadataStore, signal: signal, source: source, + flagger: fl, fullTextColumn: fullTextColumn, jsonKeyToKey: jsonKeyToKey, } @@ -128,6 +134,10 @@ func (b *resourceFilterStatementBuilder[T]) addConditions( keys map[string][]*telemetrytypes.TelemetryFieldKey, variables map[string]qbtypes.VariableItem, ) (bool, error) { + + // TODO(Tushar): thread orgID here to evaluate correctly + bodyJSONEnabled := b.flagger.BooleanOrEmpty(ctx, flagger.FeatureUseJSONBody, 
featuretypes.NewFlaggerEvaluationContext(valuer.UUID{})) + // Add filter condition if present if query.Filter != nil && query.Filter.Expression != "" { @@ -138,6 +148,7 @@ func (b *resourceFilterStatementBuilder[T]) addConditions( FieldMapper: b.fieldMapper, ConditionBuilder: b.conditionBuilder, FieldKeys: keys, + BodyJSONEnabled: bodyJSONEnabled, FullTextColumn: b.fullTextColumn, JsonKeyToKey: b.jsonKeyToKey, SkipFullTextFilter: true, diff --git a/pkg/telemetryresourcefilter/statement_builder_test.go b/pkg/telemetryresourcefilter/statement_builder_test.go index 26db8a3847..fd6ef052fe 100644 --- a/pkg/telemetryresourcefilter/statement_builder_test.go +++ b/pkg/telemetryresourcefilter/statement_builder_test.go @@ -4,6 +4,7 @@ import ( "context" "testing" + "github.com/SigNoz/signoz/pkg/flagger/flaggertest" "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" @@ -361,6 +362,7 @@ func TestResourceFilterStatementBuilder_Traces(t *testing.T) { mockMetadataStore, nil, nil, + flaggertest.New(t), ) for _, c := range cases { @@ -554,6 +556,7 @@ func TestResourceFilterStatementBuilder_Logs(t *testing.T) { mockMetadataStore, nil, nil, + flaggertest.New(t), ) for _, c := range cases { @@ -621,6 +624,7 @@ func TestResourceFilterStatementBuilder_Variables(t *testing.T) { mockMetadataStore, nil, nil, + flaggertest.New(t), ) for _, c := range cases { diff --git a/pkg/telemetrytraces/statement_builder.go b/pkg/telemetrytraces/statement_builder.go index 0cd93cc4a8..ddbf0f7e2e 100644 --- a/pkg/telemetrytraces/statement_builder.go +++ b/pkg/telemetrytraces/statement_builder.go @@ -9,6 +9,7 @@ import ( "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/flagger" "github.com/SigNoz/signoz/pkg/querybuilder" "github.com/SigNoz/signoz/pkg/telemetryresourcefilter" 
"github.com/SigNoz/signoz/pkg/telemetrystore" @@ -41,6 +42,7 @@ func NewTraceQueryStatementBuilder( conditionBuilder qbtypes.ConditionBuilder, aggExprRewriter qbtypes.AggExprRewriter, telemetryStore telemetrystore.TelemetryStore, + flagger flagger.Flagger, ) *traceQueryStatementBuilder { tracesSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetrytraces") @@ -53,6 +55,7 @@ func NewTraceQueryStatementBuilder( metadataStore, nil, nil, + flagger, ) return &traceQueryStatementBuilder{ @@ -510,7 +513,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery( // Keep original column expressions so we can build the tuple fieldNames := make([]string, 0, len(query.GroupBy)) for _, gb := range query.GroupBy { - expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, nil) + expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, nil, false) if err != nil { return nil, err } @@ -658,7 +661,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery( var allGroupByArgs []any for _, gb := range query.GroupBy { - expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, nil) + expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, nil, false) if err != nil { return nil, err } diff --git a/pkg/telemetrytraces/stmt_builder_test.go b/pkg/telemetrytraces/stmt_builder_test.go index bcc642e9f0..8c4a1395e5 100644 --- a/pkg/telemetrytraces/stmt_builder_test.go +++ b/pkg/telemetrytraces/stmt_builder_test.go @@ -6,6 +6,7 @@ import ( "time" "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/flagger/flaggertest" 
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" "github.com/SigNoz/signoz/pkg/querybuilder" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" @@ -355,7 +356,8 @@ func TestStatementBuilder(t *testing.T) { cb := NewConditionBuilder(fm) mockMetadataStore := telemetrytypestest.NewMockMetadataStore() mockMetadataStore.KeysMap = buildCompleteFieldKeyMap() - aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil) + fl := flaggertest.New(t) + aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil, fl) statementBuilder := NewTraceQueryStatementBuilder( instrumentationtest.New().ToProviderSettings(), @@ -364,6 +366,7 @@ func TestStatementBuilder(t *testing.T) { cb, aggExprRewriter, nil, + fl, ) vars := map[string]qbtypes.VariableItem{ @@ -648,7 +651,8 @@ func TestStatementBuilderListQuery(t *testing.T) { cb := NewConditionBuilder(fm) mockMetadataStore := telemetrytypestest.NewMockMetadataStore() mockMetadataStore.KeysMap = buildCompleteFieldKeyMap() - aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil) + fl := flaggertest.New(t) + aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil, fl) statementBuilder := NewTraceQueryStatementBuilder( instrumentationtest.New().ToProviderSettings(), @@ -657,6 +661,7 @@ func TestStatementBuilderListQuery(t *testing.T) { cb, aggExprRewriter, nil, + fl, ) for _, c := range cases { @@ -755,7 +760,8 @@ func TestStatementBuilderListQueryWithCorruptData(t *testing.T) { if mockMetadataStore.KeysMap == nil { mockMetadataStore.KeysMap = buildCompleteFieldKeyMap() } - aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil) + fl := flaggertest.New(t) + aggExprRewriter := 
querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil, fl) statementBuilder := NewTraceQueryStatementBuilder( instrumentationtest.New().ToProviderSettings(), @@ -764,6 +770,7 @@ func TestStatementBuilderListQueryWithCorruptData(t *testing.T) { cb, aggExprRewriter, nil, + fl, ) q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil) @@ -905,7 +912,8 @@ func TestStatementBuilderTraceQuery(t *testing.T) { cb := NewConditionBuilder(fm) mockMetadataStore := telemetrytypestest.NewMockMetadataStore() mockMetadataStore.KeysMap = buildCompleteFieldKeyMap() - aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil) + fl := flaggertest.New(t) + aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil, fl) statementBuilder := NewTraceQueryStatementBuilder( instrumentationtest.New().ToProviderSettings(), @@ -914,6 +922,7 @@ func TestStatementBuilderTraceQuery(t *testing.T) { cb, aggExprRewriter, nil, + fl, ) for _, c := range cases { @@ -1119,7 +1128,8 @@ func TestAdjustKey(t *testing.T) { fm := NewFieldMapper() cb := NewConditionBuilder(fm) mockMetadataStore := telemetrytypestest.NewMockMetadataStore() - aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil) + fl := flaggertest.New(t) + aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil, fl) statementBuilder := NewTraceQueryStatementBuilder( instrumentationtest.New().ToProviderSettings(), mockMetadataStore, @@ -1127,6 +1137,7 @@ func TestAdjustKey(t *testing.T) { cb, aggExprRewriter, nil, + fl, ) for _, c := range cases { @@ -1391,7 +1402,8 @@ func TestAdjustKeys(t *testing.T) { fm := NewFieldMapper() cb := NewConditionBuilder(fm) mockMetadataStore := 
telemetrytypestest.NewMockMetadataStore() - aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil) + fl := flaggertest.New(t) + aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil, fl) statementBuilder := NewTraceQueryStatementBuilder( instrumentationtest.New().ToProviderSettings(), mockMetadataStore, @@ -1399,6 +1411,7 @@ func TestAdjustKeys(t *testing.T) { cb, aggExprRewriter, nil, + fl, ) for _, c := range cases { diff --git a/pkg/telemetrytraces/trace_operator_cte_builder.go b/pkg/telemetrytraces/trace_operator_cte_builder.go index 53ac2cec2c..a14f43cfe9 100644 --- a/pkg/telemetrytraces/trace_operator_cte_builder.go +++ b/pkg/telemetrytraces/trace_operator_cte_builder.go @@ -560,6 +560,7 @@ func (b *traceOperatorCTEBuilder) buildTimeSeriesQuery(ctx context.Context, sele keys, telemetrytypes.FieldDataTypeString, nil, + false, ) if err != nil { return nil, errors.NewInvalidInputf( @@ -676,6 +677,7 @@ func (b *traceOperatorCTEBuilder) buildTraceQuery(ctx context.Context, selectFro keys, telemetrytypes.FieldDataTypeString, nil, + false, ) if err != nil { return nil, errors.NewInvalidInputf( @@ -822,6 +824,7 @@ func (b *traceOperatorCTEBuilder) buildScalarQuery(ctx context.Context, selectFr keys, telemetrytypes.FieldDataTypeString, nil, + false, ) if err != nil { return nil, errors.NewInvalidInputf( diff --git a/pkg/telemetrytraces/trace_operator_cte_builder_test.go b/pkg/telemetrytraces/trace_operator_cte_builder_test.go index df0f51cd20..5ea759f1fa 100644 --- a/pkg/telemetrytraces/trace_operator_cte_builder_test.go +++ b/pkg/telemetrytraces/trace_operator_cte_builder_test.go @@ -5,6 +5,7 @@ import ( "testing" "time" + "github.com/SigNoz/signoz/pkg/flagger/flaggertest" "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" "github.com/SigNoz/signoz/pkg/querybuilder" qbtypes 
"github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" @@ -390,7 +391,8 @@ func TestTraceOperatorStatementBuilder(t *testing.T) { cb := NewConditionBuilder(fm) mockMetadataStore := telemetrytypestest.NewMockMetadataStore() mockMetadataStore.KeysMap = buildCompleteFieldKeyMap() - aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil) + fl := flaggertest.New(t) + aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil, fl) traceStmtBuilder := NewTraceQueryStatementBuilder( instrumentationtest.New().ToProviderSettings(), @@ -399,6 +401,7 @@ func TestTraceOperatorStatementBuilder(t *testing.T) { cb, aggExprRewriter, nil, + fl, ) statementBuilder := NewTraceOperatorStatementBuilder( @@ -408,6 +411,7 @@ func TestTraceOperatorStatementBuilder(t *testing.T) { cb, traceStmtBuilder, aggExprRewriter, + fl, ) for _, c := range cases { @@ -503,7 +507,8 @@ func TestTraceOperatorStatementBuilderErrors(t *testing.T) { cb := NewConditionBuilder(fm) mockMetadataStore := telemetrytypestest.NewMockMetadataStore() mockMetadataStore.KeysMap = buildCompleteFieldKeyMap() - aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil) + fl := flaggertest.New(t) + aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil, fl) traceStmtBuilder := NewTraceQueryStatementBuilder( instrumentationtest.New().ToProviderSettings(), @@ -512,6 +517,7 @@ func TestTraceOperatorStatementBuilderErrors(t *testing.T) { cb, aggExprRewriter, nil, + fl, ) statementBuilder := NewTraceOperatorStatementBuilder( @@ -521,6 +527,7 @@ func TestTraceOperatorStatementBuilderErrors(t *testing.T) { cb, traceStmtBuilder, aggExprRewriter, + fl, ) for _, c := range cases { diff --git a/pkg/telemetrytraces/trace_operator_statement_builder.go 
b/pkg/telemetrytraces/trace_operator_statement_builder.go index 19dfc9cff7..992b0ebc12 100644 --- a/pkg/telemetrytraces/trace_operator_statement_builder.go +++ b/pkg/telemetrytraces/trace_operator_statement_builder.go @@ -6,6 +6,7 @@ import ( "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/flagger" "github.com/SigNoz/signoz/pkg/querybuilder" "github.com/SigNoz/signoz/pkg/telemetryresourcefilter" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" @@ -31,6 +32,7 @@ func NewTraceOperatorStatementBuilder( conditionBuilder qbtypes.ConditionBuilder, traceStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation], aggExprRewriter qbtypes.AggExprRewriter, + flagger flagger.Flagger, ) *traceOperatorStatementBuilder { tracesSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetrytraces") @@ -43,6 +45,7 @@ func NewTraceOperatorStatementBuilder( metadataStore, nil, nil, + flagger, ) return &traceOperatorStatementBuilder{ diff --git a/pkg/telemetrytraces/trace_time_range_test.go b/pkg/telemetrytraces/trace_time_range_test.go index 5e1eda53d6..3935c751c2 100644 --- a/pkg/telemetrytraces/trace_time_range_test.go +++ b/pkg/telemetrytraces/trace_time_range_test.go @@ -11,6 +11,7 @@ import ( "github.com/SigNoz/signoz/pkg/types/telemetrytypes" "github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest" "github.com/stretchr/testify/assert" + "github.com/SigNoz/signoz/pkg/flagger/flaggertest" "github.com/stretchr/testify/require" ) @@ -34,7 +35,8 @@ func TestTraceTimeRangeOptimization(t *testing.T) { Signal: telemetrytypes.SignalTraces, }} - aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil) + fl := flaggertest.New(t) + aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil, fl) statementBuilder := 
NewTraceQueryStatementBuilder( instrumentationtest.New().ToProviderSettings(), @@ -43,6 +45,7 @@ func TestTraceTimeRangeOptimization(t *testing.T) { cb, aggExprRewriter, nil, // telemetryStore is nil - optimization won't happen but code path is tested + fl, ) tests := []struct { diff --git a/tests/integration/tests/querier_json_body/conftest.py b/tests/integration/tests/querier_json_body/conftest.py index b080f0051b..867d7aecd7 100644 --- a/tests/integration/tests/querier_json_body/conftest.py +++ b/tests/integration/tests/querier_json_body/conftest.py @@ -62,6 +62,6 @@ def signoz_json_body( pytestconfig=pytestconfig, cache_key="signoz-json-body", env_overrides={ - "BODY_JSON_QUERY_ENABLED": "true", + "SIGNOZ_FLAGGER_CONFIG_BOOLEAN_USE__JSON__BODY": True, }, ) From 9a3e79fb54b84b1fba251d321c0032d9ab1b7bcb Mon Sep 17 00:00:00 2001 From: Nikhil Soni Date: Tue, 28 Apr 2026 21:04:31 +0530 Subject: [PATCH 11/19] Add support for custom field selection for color by feature. (#11092) * feat: add customer aggregation support in waterfall * chore: add tests for aggregation logic * chore: rename analytics to aggregations * chore: update openapi specs * feat: add support to request telemetry fields in flamegraph * chore: simplify getting attribute value for span * fix: remove overlapping time from duration aggregation * fix: use valuer.String for enums * feat: add preferences for preview and color by --- docs/api/openapi.yml | 36 +++ .../api/generated/services/sigNoz.schemas.ts | 37 +++ .../tracedetail/impltracedetail/handler.go | 5 + .../tracedetail/impltracedetail/module.go | 7 +- .../impltracedetail/waterfall_test.go | 4 +- .../app/clickhouseReader/reader.go | 12 +- pkg/query-service/model/queryParams.go | 11 +- pkg/query-service/model/response.go | 26 ++ pkg/query-service/model/trace.go | 14 + pkg/types/preferencetypes/name.go | 4 + pkg/types/preferencetypes/preference.go | 18 ++ pkg/types/tracedetailtypes/aggregation.go | 50 ++++ .../tracedetailtypes/aggregation_test.go | 
251 ++++++++++++++++++ pkg/types/tracedetailtypes/waterfall_span.go | 44 ++- pkg/types/tracedetailtypes/waterfall_trace.go | 137 +++++++--- 15 files changed, 609 insertions(+), 47 deletions(-) create mode 100644 pkg/types/tracedetailtypes/aggregation.go create mode 100644 pkg/types/tracedetailtypes/aggregation_test.go diff --git a/docs/api/openapi.yml b/docs/api/openapi.yml index 8617c30370..0fbbd951fb 100644 --- a/docs/api/openapi.yml +++ b/docs/api/openapi.yml @@ -4898,6 +4898,11 @@ components: type: object TracedetailtypesGettableWaterfallTrace: properties: + aggregations: + items: + $ref: '#/components/schemas/TracedetailtypesSpanAggregationResult' + nullable: true + type: array endTimestampMillis: minimum: 0 type: integer @@ -4937,6 +4942,11 @@ components: type: object TracedetailtypesPostableWaterfall: properties: + aggregations: + items: + $ref: '#/components/schemas/TracedetailtypesSpanAggregation' + nullable: true + type: array limit: minimum: 0 type: integer @@ -4948,6 +4958,32 @@ components: nullable: true type: array type: object + TracedetailtypesSpanAggregation: + properties: + aggregation: + $ref: '#/components/schemas/TracedetailtypesSpanAggregationType' + field: + $ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey' + type: object + TracedetailtypesSpanAggregationResult: + properties: + aggregation: + $ref: '#/components/schemas/TracedetailtypesSpanAggregationType' + field: + $ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey' + value: + additionalProperties: + minimum: 0 + type: integer + nullable: true + type: object + type: object + TracedetailtypesSpanAggregationType: + enum: + - span_count + - execution_time_percentage + - duration + type: string TracedetailtypesWaterfallSpan: properties: attributes: diff --git a/frontend/src/api/generated/services/sigNoz.schemas.ts b/frontend/src/api/generated/services/sigNoz.schemas.ts index 79b90c59a0..44467be28a 100644 --- a/frontend/src/api/generated/services/sigNoz.schemas.ts +++ 
b/frontend/src/api/generated/services/sigNoz.schemas.ts @@ -5901,6 +5901,11 @@ export type TracedetailtypesGettableWaterfallTraceDTOServiceNameToTotalDurationM { [key: string]: number } | null; export interface TracedetailtypesGettableWaterfallTraceDTO { + /** + * @type array + * @nullable true + */ + aggregations?: TracedetailtypesSpanAggregationResultDTO[] | null; /** * @type integer * @minimum 0 @@ -5955,6 +5960,11 @@ export interface TracedetailtypesGettableWaterfallTraceDTO { } export interface TracedetailtypesPostableWaterfallDTO { + /** + * @type array + * @nullable true + */ + aggregations?: TracedetailtypesSpanAggregationDTO[] | null; /** * @type integer * @minimum 0 @@ -5971,6 +5981,33 @@ export interface TracedetailtypesPostableWaterfallDTO { uncollapsedSpans?: string[] | null; } +export interface TracedetailtypesSpanAggregationDTO { + aggregation?: TracedetailtypesSpanAggregationTypeDTO; + field?: TelemetrytypesTelemetryFieldKeyDTO; +} + +/** + * @nullable + */ +export type TracedetailtypesSpanAggregationResultDTOValue = { + [key: string]: number; +} | null; + +export interface TracedetailtypesSpanAggregationResultDTO { + aggregation?: TracedetailtypesSpanAggregationTypeDTO; + field?: TelemetrytypesTelemetryFieldKeyDTO; + /** + * @type object + * @nullable true + */ + value?: TracedetailtypesSpanAggregationResultDTOValue; +} + +export enum TracedetailtypesSpanAggregationTypeDTO { + span_count = 'span_count', + execution_time_percentage = 'execution_time_percentage', + duration = 'duration', +} /** * @nullable */ diff --git a/pkg/modules/tracedetail/impltracedetail/handler.go b/pkg/modules/tracedetail/impltracedetail/handler.go index af328ad424..768ac09995 100644 --- a/pkg/modules/tracedetail/impltracedetail/handler.go +++ b/pkg/modules/tracedetail/impltracedetail/handler.go @@ -25,6 +25,11 @@ func (h *handler) GetWaterfall(rw http.ResponseWriter, r *http.Request) { return } + if err := req.Validate(); err != nil { + render.Error(rw, err) + return + } + 
result, err := h.module.GetWaterfall(r.Context(), mux.Vars(r)["traceID"], req) if err != nil { render.Error(rw, err) diff --git a/pkg/modules/tracedetail/impltracedetail/module.go b/pkg/modules/tracedetail/impltracedetail/module.go index f74bbe913e..1de06107f0 100644 --- a/pkg/modules/tracedetail/impltracedetail/module.go +++ b/pkg/modules/tracedetail/impltracedetail/module.go @@ -37,7 +37,12 @@ func (m *module) GetWaterfall(ctx context.Context, traceID string, req *tracedet m.config.Waterfall.MaxDepthToAutoExpand, ) - return tracedetailtypes.NewGettableWaterfallTrace(waterfallTrace, selectedSpans, uncollapsedSpans, selectedAllSpans), nil + aggregationResults := make([]tracedetailtypes.SpanAggregationResult, 0, len(req.Aggregations)) + for _, a := range req.Aggregations { + aggregationResults = append(aggregationResults, waterfallTrace.GetSpanAggregation(a.Aggregation, a.Field)) + } + + return tracedetailtypes.NewGettableWaterfallTrace(waterfallTrace, selectedSpans, uncollapsedSpans, selectedAllSpans, aggregationResults), nil } // getTraceData returns the waterfall cache for the given traceID with fallback on DB. 
diff --git a/pkg/modules/tracedetail/impltracedetail/waterfall_test.go b/pkg/modules/tracedetail/impltracedetail/waterfall_test.go index 7ddd01171a..bee3193da6 100644 --- a/pkg/modules/tracedetail/impltracedetail/waterfall_test.go +++ b/pkg/modules/tracedetail/impltracedetail/waterfall_test.go @@ -260,7 +260,7 @@ func TestGetSelectedSpans_MultipleRoots(t *testing.T) { trace := getWaterfallTrace([]*tracedetailtypes.WaterfallSpan{root1, root2}, spanMap) spans, _ := trace.GetSelectedSpans([]string{"root1", "root2"}, "root1", 500, 5) - traceRespnose := tracedetailtypes.NewGettableWaterfallTrace(trace, spans, nil, false) + traceRespnose := tracedetailtypes.NewGettableWaterfallTrace(trace, spans, nil, false, nil) assert.Equal(t, []string{"root1", "child1", "root2", "child2"}, spanIDs(spans), "root1 subtree must precede root2 subtree") assert.Equal(t, "svc-a", traceRespnose.RootServiceName, "metadata comes from first root") @@ -567,7 +567,7 @@ func TestGetAllSpans(t *testing.T) { ) trace := getWaterfallTrace([]*tracedetailtypes.WaterfallSpan{root}, nil) spans := trace.GetAllSpans() - traceResponse := tracedetailtypes.NewGettableWaterfallTrace(trace, spans, nil, true) + traceResponse := tracedetailtypes.NewGettableWaterfallTrace(trace, spans, nil, true, nil) assert.ElementsMatch(t, spanIDs(spans), []string{"root", "childA", "grandchildA", "leafA", "childB", "grandchildB", "leafB"}) assert.Equal(t, "svc", traceResponse.RootServiceName) assert.Equal(t, "root-op", traceResponse.RootServiceEntryPoint) diff --git a/pkg/query-service/app/clickhouseReader/reader.go b/pkg/query-service/app/clickhouseReader/reader.go index 2dacb30809..6765e3452d 100644 --- a/pkg/query-service/app/clickhouseReader/reader.go +++ b/pkg/query-service/app/clickhouseReader/reader.go @@ -1154,7 +1154,13 @@ func (r *ClickHouseReader) GetFlamegraphSpansForTrace(ctx context.Context, orgID if err != nil { r.logger.Info("cache miss for getFlamegraphSpansForTrace", "traceID", traceID) - searchScanResponses, err 
:= r.GetSpansForTrace(ctx, traceID, fmt.Sprintf("SELECT timestamp, duration_nano, span_id, trace_id, has_error,links as references, resource_string_service$$name, name, events FROM %s.%s WHERE trace_id=$1 and ts_bucket_start>=$2 and ts_bucket_start<=$3 ORDER BY timestamp ASC, name ASC", r.TraceDB, r.traceTableName)) + selectCols := "timestamp, duration_nano, span_id, trace_id, has_error, links as references, resource_string_service$$name, name, events" + if len(req.SelectFields) > 0 { + selectCols += ", attributes_string, attributes_number, attributes_bool, resources_string" + } + flamegraphQuery := fmt.Sprintf("SELECT %s FROM %s.%s WHERE trace_id=$1 and ts_bucket_start>=$2 and ts_bucket_start<=$3 ORDER BY timestamp ASC, name ASC", selectCols, r.TraceDB, r.traceTableName) + + searchScanResponses, err := r.GetSpansForTrace(ctx, traceID, flamegraphQuery) if err != nil { return nil, err } @@ -1193,6 +1199,10 @@ func (r *ClickHouseReader) GetFlamegraphSpansForTrace(ctx context.Context, orgID Children: make([]*model.FlamegraphSpan, 0), } + if len(req.SelectFields) > 0 { + jsonItem.SetRequestedFields(item, req.SelectFields) + } + // metadata calculation startTimeUnixNano := uint64(item.TimeUnixNano.UnixNano()) if startTime == 0 || startTimeUnixNano < startTime { diff --git a/pkg/query-service/model/queryParams.go b/pkg/query-service/model/queryParams.go index 56faa8032f..570c21bdd7 100644 --- a/pkg/query-service/model/queryParams.go +++ b/pkg/query-service/model/queryParams.go @@ -2,6 +2,8 @@ package model import ( "time" + + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" ) type InstantQueryMetricsParams struct { @@ -337,10 +339,11 @@ type GetWaterfallSpansForTraceWithMetadataParams struct { } type GetFlamegraphSpansForTraceParams struct { - SelectedSpanID string `json:"selectedSpanId"` - Limit uint `json:"limit"` - BoundaryStartTS uint64 `json:"boundaryStartTsMilli"` - BoundaryEndTS uint64 `json:"boundarEndTsMilli"` + SelectedSpanID string `json:"selectedSpanId"` + 
Limit uint `json:"limit"` + BoundaryStartTS uint64 `json:"boundaryStartTsMilli"` + BoundaryEndTS uint64 `json:"boundarEndTsMilli"` + SelectFields []telemetrytypes.TelemetryFieldKey `json:"selectFields"` } type SpanFilterParams struct { diff --git a/pkg/query-service/model/response.go b/pkg/query-service/model/response.go index 9e83bca7a1..c247fa46e4 100644 --- a/pkg/query-service/model/response.go +++ b/pkg/query-service/model/response.go @@ -7,6 +7,7 @@ import ( "strconv" "time" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" "github.com/pkg/errors" "github.com/prometheus/prometheus/promql/parser" "github.com/prometheus/prometheus/util/stats" @@ -314,6 +315,31 @@ type FlamegraphSpan struct { Events []Event `json:"event"` References []OtelSpanRef `json:"references,omitempty"` Children []*FlamegraphSpan `json:"children"` + Attributes map[string]any `json:"attributes,omitempty"` + Resource map[string]string `json:"resource,omitempty"` +} + +// SetRequestedFields extracts the requested attribute/resource fields from item into s. 
+// This can eventually support missing fieldContext by checking both +func (s *FlamegraphSpan) SetRequestedFields(item SpanItemV2, fields []telemetrytypes.TelemetryFieldKey) { + for _, field := range fields { + switch field.FieldContext { + case telemetrytypes.FieldContextResource: + if v, ok := item.Resources_string[field.Name]; ok && v != "" { + if s.Resource == nil { + s.Resource = make(map[string]string) + } + s.Resource[field.Name] = v + } + case telemetrytypes.FieldContextAttribute: + if v := item.AttributeValue(field.Name); v != nil { + if s.Attributes == nil { + s.Attributes = make(map[string]any) + } + s.Attributes[field.Name] = v + } + } + } } type GetWaterfallSpansForTraceWithMetadataResponse struct { diff --git a/pkg/query-service/model/trace.go b/pkg/query-service/model/trace.go index ccee5a83d2..48d127ec87 100644 --- a/pkg/query-service/model/trace.go +++ b/pkg/query-service/model/trace.go @@ -29,3 +29,17 @@ type TraceSummary struct { End time.Time `ch:"end"` NumSpans uint64 `ch:"num_spans"` } + +// AttributeValue looks up an attribute across string, number, and bool maps in priority order. 
+func (s SpanItemV2) AttributeValue(name string) any { + if v, ok := s.Attributes_string[name]; ok { + return v + } + if v, ok := s.Attributes_number[name]; ok { + return v + } + if v, ok := s.Attributes_bool[name]; ok { + return v + } + return nil +} diff --git a/pkg/types/preferencetypes/name.go b/pkg/types/preferencetypes/name.go index 38c07bb95b..5b0643ae25 100644 --- a/pkg/types/preferencetypes/name.go +++ b/pkg/types/preferencetypes/name.go @@ -20,6 +20,8 @@ var ( NameNavShortcuts = Name{valuer.NewString("nav_shortcuts")} NameLastSeenChangelogVersion = Name{valuer.NewString("last_seen_changelog_version")} NameSpanDetailsPinnedAttributes = Name{valuer.NewString("span_details_pinned_attributes")} + NameSpanDetailsPreviewAttributes = Name{valuer.NewString("span_details_preview_attributes")} + NameSpanDetailsColorByAttribute = Name{valuer.NewString("span_details_color_by_attribute")} NameSpanPercentileResourceAttributes = Name{valuer.NewString("span_percentile_resource_attributes")} ) @@ -40,6 +42,8 @@ func NewName(name string) (Name, error) { NameNavShortcuts.StringValue(), NameLastSeenChangelogVersion.StringValue(), NameSpanDetailsPinnedAttributes.StringValue(), + NameSpanDetailsPreviewAttributes.StringValue(), + NameSpanDetailsColorByAttribute.StringValue(), NameSpanPercentileResourceAttributes.StringValue(), }, name, diff --git a/pkg/types/preferencetypes/preference.go b/pkg/types/preferencetypes/preference.go index c8ab6bafef..df91acb777 100644 --- a/pkg/types/preferencetypes/preference.go +++ b/pkg/types/preferencetypes/preference.go @@ -163,6 +163,24 @@ func NewAvailablePreference() map[Name]Preference { AllowedValues: []string{}, Value: MustNewValue([]any{}, ValueTypeArray), }, + NameSpanDetailsPreviewAttributes: { + Name: NameSpanDetailsPreviewAttributes, + Description: "List of preview attributes in span tooltip.", + ValueType: ValueTypeArray, + DefaultValue: MustNewValue([]any{}, ValueTypeArray), + AllowedScopes: []Scope{ScopeUser}, + AllowedValues: 
[]string{}, + Value: MustNewValue([]any{}, ValueTypeArray), + }, + NameSpanDetailsColorByAttribute: { + Name: NameSpanDetailsColorByAttribute, + Description: "Attribute used to color spans in span details.", + ValueType: ValueTypeString, + DefaultValue: MustNewValue("", ValueTypeString), + AllowedScopes: []Scope{ScopeUser}, + AllowedValues: []string{}, + Value: MustNewValue("", ValueTypeString), + }, NameSpanPercentileResourceAttributes: { Name: NameSpanPercentileResourceAttributes, Description: "Additional resource attributes for span percentile filtering (beyond mandatory name and service.name).", diff --git a/pkg/types/tracedetailtypes/aggregation.go b/pkg/types/tracedetailtypes/aggregation.go new file mode 100644 index 0000000000..c06bb16982 --- /dev/null +++ b/pkg/types/tracedetailtypes/aggregation.go @@ -0,0 +1,50 @@ +package tracedetailtypes + +import ( + "slices" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "github.com/SigNoz/signoz/pkg/valuer" +) + +const maxAggregationItems = 10 + +var ErrTooManyAggregationItems = errors.NewInvalidInputf(errors.CodeInvalidInput, "aggregations request exceeds maximum of %d items", maxAggregationItems) + +// SpanAggregationType defines the aggregation to compute over spans grouped by a field. +type SpanAggregationType struct { + valuer.String +} + +var ( + SpanAggregationSpanCount = SpanAggregationType{valuer.NewString("span_count")} + SpanAggregationExecutionTimePercentage = SpanAggregationType{valuer.NewString("execution_time_percentage")} + SpanAggregationDuration = SpanAggregationType{valuer.NewString("duration")} +) + +// SpanAggregation is a single aggregation request item: which field to group by and how. +type SpanAggregation struct { + Field telemetrytypes.TelemetryFieldKey `json:"field"` + Aggregation SpanAggregationType `json:"aggregation"` +} + +// SpanAggregationResult is the computed result for one aggregation request item. 
+// Duration values are in milliseconds. +type SpanAggregationResult struct { + Field telemetrytypes.TelemetryFieldKey `json:"field"` + Aggregation SpanAggregationType `json:"aggregation"` + Value map[string]uint64 `json:"value" nullable:"true"` +} + +func (SpanAggregationType) Enum() []any { + return []any{ + SpanAggregationSpanCount, + SpanAggregationExecutionTimePercentage, + SpanAggregationDuration, + } +} + +func (s SpanAggregationType) isValid() bool { + return slices.ContainsFunc(s.Enum(), func(v any) bool { return v == s }) +} diff --git a/pkg/types/tracedetailtypes/aggregation_test.go b/pkg/types/tracedetailtypes/aggregation_test.go new file mode 100644 index 0000000000..f1000aa72f --- /dev/null +++ b/pkg/types/tracedetailtypes/aggregation_test.go @@ -0,0 +1,251 @@ +package tracedetailtypes + +import ( + "testing" + + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "github.com/stretchr/testify/assert" +) + +// mkASpan builds a WaterfallSpan with timing and field data for analytics tests. 
+func mkASpan(id string, resource map[string]string, attributes map[string]any, startNs, durationNs uint64) *WaterfallSpan { + return &WaterfallSpan{ + SpanID: id, + Resource: resource, + Attributes: attributes, + TimeUnixNano: startNs, + DurationNano: durationNs, + Children: make([]*WaterfallSpan, 0), + } +} + +func buildTraceFromSpans(spans ...*WaterfallSpan) *WaterfallTrace { + spanMap := make(map[string]*WaterfallSpan, len(spans)) + var startTime, endTime uint64 + initialized := false + for _, s := range spans { + spanMap[s.SpanID] = s + if !initialized || s.TimeUnixNano < startTime { + startTime = s.TimeUnixNano + initialized = true + } + if end := s.TimeUnixNano + s.DurationNano; end > endTime { + endTime = end + } + } + return NewWaterfallTrace(startTime, endTime, uint64(len(spanMap)), 0, spanMap, nil, nil, false) +} + +var ( + fieldServiceName = telemetrytypes.TelemetryFieldKey{ + Name: "service.name", + FieldContext: telemetrytypes.FieldContextResource, + } + fieldHTTPMethod = telemetrytypes.TelemetryFieldKey{ + Name: "http.method", + FieldContext: telemetrytypes.FieldContextAttribute, + } + fieldCached = telemetrytypes.TelemetryFieldKey{ + Name: "db.cached", + FieldContext: telemetrytypes.FieldContextAttribute, + } +) + +func TestGetSpanAggregation_SpanCount(t *testing.T) { + tests := []struct { + name string + trace *WaterfallTrace + field telemetrytypes.TelemetryFieldKey + want map[string]uint64 + }{ + { + name: "counts by resource field", + trace: buildTraceFromSpans( + mkASpan("s1", map[string]string{"service.name": "frontend"}, nil, 0, 10), + mkASpan("s2", map[string]string{"service.name": "frontend"}, nil, 10, 5), + mkASpan("s3", map[string]string{"service.name": "backend"}, nil, 20, 8), + ), + field: fieldServiceName, + want: map[string]uint64{"frontend": 2, "backend": 1}, + }, + { + name: "counts by string attribute field", + trace: buildTraceFromSpans( + mkASpan("s1", nil, map[string]any{"http.method": "GET"}, 0, 10), + mkASpan("s2", nil, 
map[string]any{"http.method": "POST"}, 10, 5), + mkASpan("s3", nil, map[string]any{"http.method": "GET"}, 20, 8), + ), + field: fieldHTTPMethod, + want: map[string]uint64{"GET": 2, "POST": 1}, + }, + { + name: "counts by boolean attribute field", + trace: buildTraceFromSpans( + mkASpan("s1", nil, map[string]any{"db.cached": true}, 0, 10), + mkASpan("s2", nil, map[string]any{"db.cached": false}, 10, 5), + mkASpan("s3", nil, map[string]any{"db.cached": true}, 20, 8), + ), + field: fieldCached, + want: map[string]uint64{"true": 2, "false": 1}, + }, + { + name: "spans missing the field are excluded", + trace: buildTraceFromSpans( + mkASpan("s1", map[string]string{"service.name": "frontend"}, nil, 0, 10), + mkASpan("s2", map[string]string{}, nil, 10, 5), // no service.name + mkASpan("s3", map[string]string{"service.name": "backend"}, nil, 20, 8), + ), + field: fieldServiceName, + want: map[string]uint64{"frontend": 1, "backend": 1}, + }, + { + // empty string is a valid field value — counted under the "" key, unlike a missing field + name: "span with empty service.name is counted under empty string key", + trace: buildTraceFromSpans( + mkASpan("s1", map[string]string{"service.name": "frontend"}, nil, 0, 10), + mkASpan("s2", map[string]string{"service.name": ""}, nil, 10, 5), + mkASpan("s3", map[string]string{"service.name": "backend"}, nil, 20, 8), + ), + field: fieldServiceName, + want: map[string]uint64{"frontend": 1, "backend": 1, "": 1}, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + result := tc.trace.GetSpanAggregation(SpanAggregationSpanCount, tc.field) + assert.Equal(t, tc.field, result.Field) + assert.Equal(t, SpanAggregationSpanCount, result.Aggregation) + assert.Equal(t, tc.want, result.Value) + }) + } +} + +func TestGetSpanAggregation_Duration(t *testing.T) { + tests := []struct { + name string + trace *WaterfallTrace + field telemetrytypes.TelemetryFieldKey + want map[string]uint64 + }{ + { + name: "non-overlapping spans — 
merged equals sum", + trace: buildTraceFromSpans( + mkASpan("s1", map[string]string{"service.name": "frontend"}, nil, 0, 100), + mkASpan("s2", map[string]string{"service.name": "frontend"}, nil, 100, 50), + mkASpan("s3", map[string]string{"service.name": "backend"}, nil, 0, 80), + ), + field: fieldServiceName, + want: map[string]uint64{"frontend": 150, "backend": 80}, + }, + { + name: "non-overlapping attribute groups — merged equals sum", + trace: buildTraceFromSpans( + mkASpan("s1", nil, map[string]any{"http.method": "GET"}, 0, 30), + mkASpan("s2", nil, map[string]any{"http.method": "GET"}, 50, 20), + mkASpan("s3", nil, map[string]any{"http.method": "POST"}, 0, 70), + ), + field: fieldHTTPMethod, + want: map[string]uint64{"GET": 50, "POST": 70}, + }, + { + name: "overlapping spans — non-overlapping interval merge", + trace: buildTraceFromSpans( + mkASpan("s1", map[string]string{"service.name": "svc"}, nil, 0, 10), + mkASpan("s2", map[string]string{"service.name": "svc"}, nil, 5, 10), + ), + field: fieldServiceName, + want: map[string]uint64{"svc": 15}, // [0,10] ∪ [5,15] = [0,15] + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + result := tc.trace.GetSpanAggregation(SpanAggregationDuration, tc.field) + assert.Equal(t, tc.field, result.Field) + assert.Equal(t, SpanAggregationDuration, result.Aggregation) + assert.Equal(t, tc.want, result.Value) + }) + } +} + +func TestGetSpanAggregation_ExecutionTimePercentage(t *testing.T) { + tests := []struct { + name string + trace *WaterfallTrace + field telemetrytypes.TelemetryFieldKey + want map[string]uint64 + }{ + { + // trace [0,30]: svc occupies [0,10]+[20,30]=20 → 20*100/30 = 66% + name: "non-overlapping spans", + trace: buildTraceFromSpans( + mkASpan("s1", map[string]string{"service.name": "svc"}, nil, 0, 10), + mkASpan("s2", map[string]string{"service.name": "svc"}, nil, 20, 10), + ), + field: fieldServiceName, + want: map[string]uint64{"svc": 66}, + }, + { + // trace [0,15]: svc 
[0,15]=15 → 100% + name: "partially overlapping spans", + trace: buildTraceFromSpans( + mkASpan("s1", map[string]string{"service.name": "svc"}, nil, 0, 10), + mkASpan("s2", map[string]string{"service.name": "svc"}, nil, 5, 10), + ), + field: fieldServiceName, + want: map[string]uint64{"svc": 100}, + }, + { + // trace [0,20]: outer absorbs inner → 100% + name: "fully contained span", + trace: buildTraceFromSpans( + mkASpan("outer", map[string]string{"service.name": "svc"}, nil, 0, 20), + mkASpan("inner", map[string]string{"service.name": "svc"}, nil, 5, 5), + ), + field: fieldServiceName, + want: map[string]uint64{"svc": 100}, + }, + { + // trace [0,30]: svc [0,15]+[20,30]=25 → 25*100/30 = 83% + name: "three spans with two merges", + trace: buildTraceFromSpans( + mkASpan("s1", map[string]string{"service.name": "svc"}, nil, 0, 10), + mkASpan("s2", map[string]string{"service.name": "svc"}, nil, 5, 10), + mkASpan("s3", map[string]string{"service.name": "svc"}, nil, 20, 10), + ), + field: fieldServiceName, + want: map[string]uint64{"svc": 83}, + }, + { + // trace [0,28]: frontend [0,15]=15 → 53%, backend [0,5]+[20,28]=13 → 46% + name: "independent groups are computed separately", + trace: buildTraceFromSpans( + mkASpan("a1", map[string]string{"service.name": "frontend"}, nil, 0, 10), + mkASpan("a2", map[string]string{"service.name": "frontend"}, nil, 5, 10), + mkASpan("b1", map[string]string{"service.name": "backend"}, nil, 0, 5), + mkASpan("b2", map[string]string{"service.name": "backend"}, nil, 20, 8), + ), + field: fieldServiceName, + want: map[string]uint64{"frontend": 53, "backend": 46}, + }, + { + // trace [100,150]: svc [100,150]=50 → 100% + name: "single span", + trace: buildTraceFromSpans( + mkASpan("s1", map[string]string{"service.name": "svc"}, nil, 100, 50), + ), + field: fieldServiceName, + want: map[string]uint64{"svc": 100}, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + result := 
tc.trace.GetSpanAggregation(SpanAggregationExecutionTimePercentage, tc.field) + assert.Equal(t, tc.field, result.Field) + assert.Equal(t, SpanAggregationExecutionTimePercentage, result.Aggregation) + assert.Equal(t, tc.want, result.Value) + }) + } +} diff --git a/pkg/types/tracedetailtypes/waterfall_span.go b/pkg/types/tracedetailtypes/waterfall_span.go index 28ca5fa4a6..90397e720b 100644 --- a/pkg/types/tracedetailtypes/waterfall_span.go +++ b/pkg/types/tracedetailtypes/waterfall_span.go @@ -2,11 +2,13 @@ package tracedetailtypes import ( "encoding/json" + "fmt" "maps" "sort" "time" "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" ) const ( @@ -21,9 +23,27 @@ var ErrTraceNotFound = errors.NewNotFoundf(errors.CodeNotFound, "trace not found // PostableWaterfall is the request body for the v3 waterfall API. type PostableWaterfall struct { - SelectedSpanID string `json:"selectedSpanId"` - UncollapsedSpans []string `json:"uncollapsedSpans"` - Limit uint `json:"limit"` + SelectedSpanID string `json:"selectedSpanId"` + UncollapsedSpans []string `json:"uncollapsedSpans"` + Limit uint `json:"limit"` + Aggregations []SpanAggregation `json:"aggregations"` +} + +func (p *PostableWaterfall) Validate() error { + if len(p.Aggregations) > maxAggregationItems { + return ErrTooManyAggregationItems + } + for _, a := range p.Aggregations { + if !a.Aggregation.isValid() { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "unknown aggregation type: %q", a.Aggregation) + } + fc := a.Field.FieldContext + if fc != telemetrytypes.FieldContextResource && fc != telemetrytypes.FieldContextAttribute { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "aggregation field context must be %q or %q, got %q", + telemetrytypes.FieldContextResource, telemetrytypes.FieldContextAttribute, fc) + } + } + return nil } // Event represents a span event. 
@@ -160,7 +180,23 @@ func (ws *WaterfallSpan) GetSubtreeNodeCount() uint64 { return count } -// getPreOrderedSpans returns spans in pre-order, uncollapsedSpanIDs must be pre-computed. +// FieldValue returns the string representation of field's value on this span for grouping. +// The bool reports whether the field was present with a non-empty value. +func (ws *WaterfallSpan) FieldValue(field telemetrytypes.TelemetryFieldKey) (string, bool) { + switch field.FieldContext { + case telemetrytypes.FieldContextResource: + v, ok := ws.Resource[field.Name] + return v, ok + case telemetrytypes.FieldContextAttribute: + v, ok := ws.Attributes[field.Name] + if !ok { + return "", false + } + return fmt.Sprintf("%v", v), true + } + return "", false +} + func (ws *WaterfallSpan) getPreOrderedSpans(uncollapsedSpanIDs map[string]struct{}, selectAll bool, level uint64) []*WaterfallSpan { result := []*WaterfallSpan{ws.GetWithoutChildren(level)} _, isUncollapsed := uncollapsedSpanIDs[ws.SpanID] diff --git a/pkg/types/tracedetailtypes/waterfall_trace.go b/pkg/types/tracedetailtypes/waterfall_trace.go index 7e808a0441..4eb28ff613 100644 --- a/pkg/types/tracedetailtypes/waterfall_trace.go +++ b/pkg/types/tracedetailtypes/waterfall_trace.go @@ -8,6 +8,7 @@ import ( "time" "github.com/SigNoz/signoz/pkg/types/cachetypes" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" ) type TraceSummary struct { @@ -31,17 +32,19 @@ type WaterfallTrace struct { // GettableWaterfallTrace is the response for the v3 waterfall API. 
type GettableWaterfallTrace struct { - StartTimestampMillis uint64 `json:"startTimestampMillis"` - EndTimestampMillis uint64 `json:"endTimestampMillis"` - RootServiceName string `json:"rootServiceName"` - RootServiceEntryPoint string `json:"rootServiceEntryPoint"` - TotalSpansCount uint64 `json:"totalSpansCount"` - TotalErrorSpansCount uint64 `json:"totalErrorSpansCount"` - ServiceNameToTotalDurationMap map[string]uint64 `json:"serviceNameToTotalDurationMap"` - Spans []*WaterfallSpan `json:"spans"` - HasMissingSpans bool `json:"hasMissingSpans"` - UncollapsedSpans []string `json:"uncollapsedSpans"` - HasMore bool `json:"hasMore"` + StartTimestampMillis uint64 `json:"startTimestampMillis"` + EndTimestampMillis uint64 `json:"endTimestampMillis"` + RootServiceName string `json:"rootServiceName"` + RootServiceEntryPoint string `json:"rootServiceEntryPoint"` + TotalSpansCount uint64 `json:"totalSpansCount"` + TotalErrorSpansCount uint64 `json:"totalErrorSpansCount"` + // Deprecated: use Aggregations with SpanAggregationExecutionTimePercentage on the service.name field instead. + ServiceNameToTotalDurationMap map[string]uint64 `json:"serviceNameToTotalDurationMap"` + Spans []*WaterfallSpan `json:"spans"` + HasMissingSpans bool `json:"hasMissingSpans"` + UncollapsedSpans []string `json:"uncollapsedSpans"` + HasMore bool `json:"hasMore"` + Aggregations []SpanAggregationResult `json:"aggregations"` } // NewWaterfallTrace constructs a WaterfallTrace from processed span data. @@ -240,12 +243,13 @@ func (wt *WaterfallTrace) UnmarshalBinary(data []byte) error { return json.Unmarshal(data, wt) } -// NewGettableWaterfallTrace constructs a WaterfallResponse from processed trace data and selected spans. +// NewGettableWaterfallTrace constructs a GettableWaterfallTrace from processed trace data and selected spans. 
func NewGettableWaterfallTrace( traceData *WaterfallTrace, selectedSpans []*WaterfallSpan, uncollapsedSpans []string, selectAllSpans bool, + aggregations []SpanAggregationResult, ) *GettableWaterfallTrace { var rootServiceName, rootServiceEntryPoint string if len(traceData.TraceRoots) > 0 { @@ -263,6 +267,15 @@ func NewGettableWaterfallTrace( span.TimeUnixNano = span.TimeUnixNano / 1_000_000 } + // duration values are in nanoseconds; convert in-place to milliseconds. + for i := range aggregations { + if aggregations[i].Aggregation == SpanAggregationDuration { + for k, v := range aggregations[i].Value { + aggregations[i].Value[k] = v / 1_000_000 + } + } + } + return &GettableWaterfallTrace{ Spans: selectedSpans, UncollapsedSpans: uncollapsedSpans, @@ -275,6 +288,7 @@ func NewGettableWaterfallTrace( ServiceNameToTotalDurationMap: serviceDurationsMillis, HasMissingSpans: traceData.HasMissingSpans, HasMore: !selectAllSpans, + Aggregations: aggregations, } } @@ -307,29 +321,82 @@ func calculateServiceTime(spanIDToSpanNodeMap map[string]*WaterfallSpan) map[str totalTimes := make(map[string]uint64) for service, spans := range serviceSpans { - sort.Slice(spans, func(i, j int) bool { - return spans[i].TimeUnixNano < spans[j].TimeUnixNano - }) - - currentStart := spans[0].TimeUnixNano - currentEnd := currentStart + spans[0].DurationNano - total := uint64(0) - - for _, span := range spans[1:] { - startNano := span.TimeUnixNano - endNano := startNano + span.DurationNano - if currentEnd >= startNano { - if endNano > currentEnd { - currentEnd = endNano - } - } else { - total += currentEnd - currentStart - currentStart = startNano - currentEnd = endNano - } - } - total += currentEnd - currentStart - totalTimes[service] = total + totalTimes[service] = mergeSpanIntervals(spans) } return totalTimes } + +// mergeSpanIntervals computes non-overlapping execution time for a set of spans. 
+func mergeSpanIntervals(spans []*WaterfallSpan) uint64 { + if len(spans) == 0 { + return 0 + } + sort.Slice(spans, func(i, j int) bool { + return spans[i].TimeUnixNano < spans[j].TimeUnixNano + }) + + currentStart := spans[0].TimeUnixNano + currentEnd := currentStart + spans[0].DurationNano + total := uint64(0) + + for _, span := range spans[1:] { + startNano := span.TimeUnixNano + endNano := startNano + span.DurationNano + if currentEnd >= startNano { + if endNano > currentEnd { + currentEnd = endNano + } + } else { + total += currentEnd - currentStart + currentStart = startNano + currentEnd = endNano + } + } + return total + (currentEnd - currentStart) +} + +// GetSpanAggregation computes one aggregation result over all spans in the trace. +// Duration values are returned in nanoseconds; callers convert to milliseconds as needed. +func (wt *WaterfallTrace) GetSpanAggregation(aggregation SpanAggregationType, field telemetrytypes.TelemetryFieldKey) SpanAggregationResult { + result := SpanAggregationResult{ + Field: field, + Aggregation: aggregation, + Value: make(map[string]uint64), + } + + switch aggregation { + case SpanAggregationSpanCount: + for _, span := range wt.SpanIDToSpanNodeMap { + if key, ok := span.FieldValue(field); ok { + result.Value[key]++ + } + } + + case SpanAggregationDuration: + spansByField := make(map[string][]*WaterfallSpan) + for _, span := range wt.SpanIDToSpanNodeMap { + if key, ok := span.FieldValue(field); ok { + spansByField[key] = append(spansByField[key], span) + } + } + for key, spans := range spansByField { + result.Value[key] = mergeSpanIntervals(spans) + } + + case SpanAggregationExecutionTimePercentage: + traceDuration := wt.EndTime - wt.StartTime + spansByField := make(map[string][]*WaterfallSpan) + for _, span := range wt.SpanIDToSpanNodeMap { + if key, ok := span.FieldValue(field); ok { + spansByField[key] = append(spansByField[key], span) + } + } + if traceDuration > 0 { + for key, spans := range spansByField { + 
result.Value[key] = mergeSpanIntervals(spans) * 100 / traceDuration + } + } + } + + return result +} From 69212ef9b1b69668d48db6ea51e41e545d122300 Mon Sep 17 00:00:00 2001 From: swapnil-signoz Date: Tue, 28 Apr 2026 23:16:29 +0530 Subject: [PATCH 12/19] feat(cloudintegrations): add azure dashboards (#11094) * refactor: moving types to cloud provider specific namespace/pkg * refactor: separating cloud provider types * refactor: using upper case key for AWS * feat: adding cloud integration azure types * feat: adding azure services * refactor: updating omitempty tags * refactor: updating azure integration config * feat: completing azure types * refactor: lint issues * feat: adding service definitions for azure * refactor: update service names for Azure Blob Storage telemetry * refactor: updating definitions with metrics and strategy * refactor: updating command key * fix: handle optional connection URL in AWS integration * feat: wip * refactor: updating strategy struct * refactor: updating telemetry strategy * refactor: updating connection artifact struct * refactor: updating blob storage service name * refactor: updating azure blob storage service name * refactor: update Azure service identifiers * refactor: updating service defs * fix: update integration account ID and add agent version to Azure CLI and PowerShell commands * refactor: updating deny settings mode * refactor: updating types * refactor: adding missing case for azure service update * feat: implement Azure connection commands and add unit tests * refactor: using template for Azure connection artifact creation and update tests * feat: adding dashboards for azure * refactor: updated aggregrations * refactor: updating aggregations --- .../assets/dashboards/overview.json | 1493 ++++++++++++- .../azure/cdnprofile/integration.json | 18 + .../assets/dashboards/overview.json | 1987 ++++++++++++++++- 3 files changed, 3496 insertions(+), 2 deletions(-) diff --git 
a/pkg/modules/cloudintegration/implcloudintegration/fs/definitions/azure/cdnprofile/assets/dashboards/overview.json b/pkg/modules/cloudintegration/implcloudintegration/fs/definitions/azure/cdnprofile/assets/dashboards/overview.json index 0967ef424b..02c74b6eee 100644 --- a/pkg/modules/cloudintegration/implcloudintegration/fs/definitions/azure/cdnprofile/assets/dashboards/overview.json +++ b/pkg/modules/cloudintegration/implcloudintegration/fs/definitions/azure/cdnprofile/assets/dashboards/overview.json @@ -1 +1,1492 @@ -{} +{ + "description": "Out of the box dashboard for Azure CDN Profile.", + "image": "data:image/svg+xml;base64,PHN2ZyBpZD0iYjMwMGYwZDEtMmFkOC00NDE4LWExYzUtMjNkMGI5ZDIxODQxIiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxOCAxOCI+PGRlZnM+PGxpbmVhckdyYWRpZW50IGlkPSJiOGNhZDZmZC1lYzdmLTQ1ZTktYmUyYS0xMjVlOGI4N2JkMDMiIHgxPSIxMC43OSIgeTE9IjIuMTciIHgyPSIxMC43OSIgeTI9IjE2LjU2IiBncmFkaWVudFVuaXRzPSJ1c2VyU3BhY2VPblVzZSI+PHN0b3Agb2Zmc2V0PSIwLjE4IiBzdG9wLWNvbG9yPSIjNWVhMGVmIi8+PHN0b3Agb2Zmc2V0PSIxIiBzdG9wLWNvbG9yPSIjMDA3OGQ0Ii8+PC9saW5lYXJHcmFkaWVudD48L2RlZnM+PHRpdGxlPkljb24td2ViLTQzPC90aXRsZT48cmVjdCB4PSIzLjciIHk9IjUuNDkiIHdpZHRoPSIxLjE4IiBoZWlnaHQ9IjUuMjYiIHJ4PSIwLjUyIiB0cmFuc2Zvcm09InRyYW5zbGF0ZSgtMy44MyAxMi40MSkgcm90YXRlKC05MCkiIGZpbGw9IiNiM2IzYjMiLz48cmVjdCB4PSIyLjA0IiB5PSI3Ljg4IiB3aWR0aD0iMS4xOCIgaGVpZ2h0PSI1LjI2IiByeD0iMC41MiIgdHJhbnNmb3JtPSJ0cmFuc2xhdGUoLTcuODggMTMuMTQpIHJvdGF0ZSgtOTApIiBmaWxsPSIjYTNhM2EzIi8+PHJlY3QgeD0iMy43IiB5PSIxMC4yNiIgd2lkdGg9IjEuMTgiIGhlaWdodD0iNS4yNiIgcng9IjAuNTIiIHRyYW5zZm9ybT0idHJhbnNsYXRlKC04LjYgMTcuMTkpIHJvdGF0ZSgtOTApIiBmaWxsPSIjN2E3YTdhIi8+PHBhdGggZD0iTTE4LDExYTMuMjgsMy4yOCwwLDAsMC0yLjgxLTMuMTgsNC4xMyw0LjEzLDAsMCwwLTQuMjEtNCw0LjIzLDQuMjMsMCwwLDAtNCwyLjgsMy44OSwzLjg5LDAsMCwwLTMuMzgsMy44LDQsNCwwLDAsMCw0LjA2LDMuODZsLjM2LDBoNi41OGwuMTcsMEEzLjMyLDMuMzIsMCwwLDAsMTgsMTFaIiBmaWxsPSJ1cmwoI2I4Y2FkNmZkLWVjN2YtNDVlOS1iZTJhLTEyNWU4Yjg3YmQwMykiLz48L3N2Zz4=", + "layout": [ + { + "h": 1, + "i": "row-latency", + "maxH": 1, 
+ "minH": 1, + "minW": 12, + "moved": false, + "static": false, + "w": 12, + "x": 0, + "y": 0 + }, + { + "h": 6, + "i": "w-totallatency", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 1 + }, + { + "h": 6, + "i": "w-originlatency", + "moved": false, + "static": false, + "w": 6, + "x": 6, + "y": 1 + }, + { + "h": 6, + "i": "w-avgwsconnectionduration", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 7 + }, + { + "h": 1, + "i": "row-originhealth", + "maxH": 1, + "minH": 1, + "minW": 12, + "moved": false, + "static": false, + "w": 12, + "x": 0, + "y": 13 + }, + { + "h": 6, + "i": "w-originhealthpercentage", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 14 + }, + { + "h": 1, + "i": "row-requeststatus", + "maxH": 1, + "minH": 1, + "minW": 12, + "moved": false, + "static": false, + "w": 12, + "x": 0, + "y": 20 + }, + { + "h": 6, + "i": "w-bytehitratio", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 21 + }, + { + "h": 6, + "i": "w-percentage4xx", + "moved": false, + "static": false, + "w": 6, + "x": 6, + "y": 21 + }, + { + "h": 6, + "i": "w-percentage5xx", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 27 + }, + { + "h": 1, + "i": "row-traffic", + "maxH": 1, + "minH": 1, + "minW": 12, + "moved": false, + "static": false, + "w": 12, + "x": 0, + "y": 33 + }, + { "h": 6, "i": "w-requestcount", "moved": false, "static": false, "w": 6, "x": 0, "y": 34 }, + { "h": 6, "i": "w-originrequestcount", "moved": false, "static": false, "w": 6, "x": 6, "y": 34 }, + { "h": 6, "i": "w-requestsize", "moved": false, "static": false, "w": 6, "x": 0, "y": 40 }, + { "h": 6, "i": "w-responsesize", "moved": false, "static": false, "w": 6, "x": 6, "y": 40 }, + { "h": 6, "i": "w-activewebsocketconnections", "moved": false, "static": false, "w": 6, "x": 0, "y": 46 }, + { "h": 6, "i": "w-websocketconnections", "moved": false, "static": false, "w": 6, "x": 6, "y": 46 }, + { "h": 6, "i": "w-wafrequestcount", "moved": 
false, "static": false, "w": 6, "x": 0, "y": 52 }, + { "h": 6, "i": "w-wafcaptcharequestcount", "moved": false, "static": false, "w": 6, "x": 6, "y": 52 }, + { "h": 6, "i": "w-wafjsrequestcount", "moved": false, "static": false, "w": 6, "x": 0, "y": 58 } + ], + "panelMap": { + "row-latency": { + "collapsed": false, + "widgets": [ + { + "h": 6, + "i": "w-totallatency", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 1 + }, + { + "h": 6, + "i": "w-originlatency", + "moved": false, + "static": false, + "w": 6, + "x": 6, + "y": 1 + }, + { + "h": 6, + "i": "w-avgwsconnectionduration", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 7 + } + ] + }, + "row-originhealth": { + "collapsed": false, + "widgets": [ + { + "h": 6, + "i": "w-originhealthpercentage", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 14 + } + ] + }, + "row-traffic": { + "collapsed": false, + "widgets": [ + { "h": 6, "i": "w-requestcount", "moved": false, "static": false, "w": 6, "x": 0, "y": 34 }, + { "h": 6, "i": "w-originrequestcount", "moved": false, "static": false, "w": 6, "x": 6, "y": 34 }, + { "h": 6, "i": "w-requestsize", "moved": false, "static": false, "w": 6, "x": 0, "y": 40 }, + { "h": 6, "i": "w-responsesize", "moved": false, "static": false, "w": 6, "x": 6, "y": 40 }, + { "h": 6, "i": "w-activewebsocketconnections", "moved": false, "static": false, "w": 6, "x": 0, "y": 46 }, + { "h": 6, "i": "w-websocketconnections", "moved": false, "static": false, "w": 6, "x": 6, "y": 46 }, + { "h": 6, "i": "w-wafrequestcount", "moved": false, "static": false, "w": 6, "x": 0, "y": 52 }, + { "h": 6, "i": "w-wafcaptcharequestcount", "moved": false, "static": false, "w": 6, "x": 6, "y": 52 }, + { "h": 6, "i": "w-wafjsrequestcount", "moved": false, "static": false, "w": 6, "x": 0, "y": 58 } + ] + }, + "row-requeststatus": { + "collapsed": false, + "widgets": [ + { + "h": 6, + "i": "w-bytehitratio", + "moved": false, + "static": false, + "w": 6, + "x": 0, + 
"y": 21 + }, + { + "h": 6, + "i": "w-percentage4xx", + "moved": false, + "static": false, + "w": 6, + "x": 6, + "y": 21 + }, + { + "h": 6, + "i": "w-percentage5xx", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 27 + } + ] + } + }, + "tags": [], + "title": "Azure CDN Profile Overview", + "uploadedGrafana": false, + "variables": { + "a1b2c3d4-0001-4000-8000-000000000001": { + "allSelected": false, + "customValue": "", + "defaultValue": "", + "description": "Subscription ID for your Azure Account", + "dynamicVariablesAttribute": "azuremonitor.subscription_id", + "dynamicVariablesSource": "Metrics", + "id": "a1b2c3d4-0001-4000-8000-000000000001", + "key": "a1b2c3d4-0001-4000-8000-000000000001", + "modificationUUID": "a1b2c3d4-0001-4000-8000-000000000011", + "multiSelect": false, + "name": "azuremonitor.subscription_id", + "order": 0, + "queryValue": "", + "showALLOption": true, + "sort": "DISABLED", + "textboxValue": "", + "type": "DYNAMIC" + }, + "a1b2c3d4-0002-4000-8000-000000000002": { + "allSelected": false, + "customValue": "", + "defaultValue": "", + "description": "Resource Group for your resources", + "dynamicVariablesAttribute": "resource_group", + "dynamicVariablesSource": "Metrics", + "id": "a1b2c3d4-0002-4000-8000-000000000002", + "key": "a1b2c3d4-0002-4000-8000-000000000002", + "modificationUUID": "a1b2c3d4-0002-4000-8000-000000000022", + "multiSelect": false, + "name": "resource_group", + "order": 1, + "queryValue": "", + "showALLOption": true, + "sort": "DISABLED", + "textboxValue": "", + "type": "DYNAMIC" + }, + "a1b2c3d4-0003-4000-8000-000000000003": { + "allSelected": false, + "customValue": "", + "defaultValue": "", + "description": "Name of the CDN Profile", + "dynamicVariablesAttribute": "name", + "dynamicVariablesSource": "Metrics", + "id": "a1b2c3d4-0003-4000-8000-000000000003", + "key": "a1b2c3d4-0003-4000-8000-000000000003", + "modificationUUID": "a1b2c3d4-0003-4000-8000-000000000033", + "multiSelect": false, + "name": "cdnprofile", + "order": 2, + "queryValue": "", + 
"showALLOption": true, + "sort": "DISABLED", + "textboxValue": "", + "type": "DYNAMIC" + } + }, + "version": "v5", + "widgets": [ + { + "id": "row-latency", + "panelTypes": "row", + "title": "Latency" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "contextLinks": { "linksData": [] }, + "customLegendColors": {}, + "decimalPrecision": 2, + "description": "The time calculated from when the client request was received by the HTTP/S proxy until the client acknowledged the last response byte from the HTTP/S proxy.", + "fillMode": "none", + "fillSpans": false, + "id": "w-totallatency", + "isLogScale": false, + "legendPosition": "bottom", + "lineInterpolation": "spline", + "lineStyle": "solid", + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregations": [ + { + "metricName": "azure_totallatency_average", + "reduceTo": "avg", + "spaceAggregation": "max", + "temporality": "", + "timeAggregation": "avg" + } + ], + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filter": { + "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $cdnprofile AND type = 'Microsoft.Cdn/profiles'" + }, + "filters": { + "items": [ + { + "id": "f001-name", + "key": { "dataType": "string", "id": "name--string--", "key": "name", "type": "" }, + "op": "IN", + "value": "$cdnprofile" + }, + { + "id": "f001-sub", + "key": { "id": "azuremonitor.subscription_id", "key": "azuremonitor.subscription_id", "type": "" }, + "op": "=", + "value": "$azuremonitor.subscription_id" + }, + { + "id": "f001-rg", + "key": { "id": "resource_group", "key": "resource_group", "type": "" }, + "op": "=", + "value": "$resource_group" + }, + { + "id": "f001-type", + "key": { "id": "type", "key": "type", "type": "" }, + "op": "=", + "value": "Microsoft.Cdn/profiles" + } + ], + "op": "AND" + }, + 
"functions": [], + "groupBy": [ + { + "dataType": "string", + "id": "name--string--tag", + "isColumn": false, + "isJSON": false, + "key": "name", + "type": "tag" + } + ], + "having": { "expression": "" }, + "legend": "{{name}}", + "limit": null, + "orderBy": [], + "queryName": "A", + "source": "", + "stepInterval": null + } + ], + "queryFormulas": [], + "queryTraceOperator": [] + }, + "clickhouse_sql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "id": "q-w-totallatency", + "promql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "queryType": "builder", + "unit": "" + }, + "selectedLogFields": [ + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "timestamp", "signal": "logs", "type": "log" }, + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "body", "signal": "logs", "type": "log" } + ], + "selectedTracesFields": [ + { "fieldContext": "resource", "fieldDataType": "string", "name": "service.name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "string", "name": "name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "duration_nano", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "http_method", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "response_status_code", "signal": "traces" } + ], + "showPoints": false, + "softMax": 0, + "softMin": 0, + "spanGaps": true, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Total Latency", + "yAxisUnit": "ms" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "contextLinks": { "linksData": [] }, + "customLegendColors": {}, + "decimalPrecision": 2, + "description": "The time calculated from when the request was sent by AFDX edge to the backend until AFDX received the last response byte from the backend.", + "fillMode": "none", + 
"fillSpans": false, + "id": "w-originlatency", + "isLogScale": false, + "legendPosition": "bottom", + "lineInterpolation": "spline", + "lineStyle": "solid", + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregations": [ + { + "metricName": "azure_originlatency_average", + "reduceTo": "avg", + "spaceAggregation": "max", + "temporality": "", + "timeAggregation": "avg" + } + ], + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filter": { + "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $cdnprofile AND type = 'Microsoft.Cdn/profiles'" + }, + "filters": { + "items": [ + { + "id": "f002-name", + "key": { "dataType": "string", "id": "name--string--", "key": "name", "type": "" }, + "op": "IN", + "value": "$cdnprofile" + }, + { + "id": "f002-sub", + "key": { "id": "azuremonitor.subscription_id", "key": "azuremonitor.subscription_id", "type": "" }, + "op": "=", + "value": "$azuremonitor.subscription_id" + }, + { + "id": "f002-rg", + "key": { "id": "resource_group", "key": "resource_group", "type": "" }, + "op": "=", + "value": "$resource_group" + }, + { + "id": "f002-type", + "key": { "id": "type", "key": "type", "type": "" }, + "op": "=", + "value": "Microsoft.Cdn/profiles" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [ + { + "dataType": "string", + "id": "name--string--tag", + "isColumn": false, + "isJSON": false, + "key": "name", + "type": "tag" + } + ], + "having": { "expression": "" }, + "legend": "{{name}}", + "limit": null, + "orderBy": [], + "queryName": "A", + "source": "", + "stepInterval": null + } + ], + "queryFormulas": [], + "queryTraceOperator": [] + }, + "clickhouse_sql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "id": "q-w-originlatency", + "promql": [{ "disabled": false, "legend": "", "name": "A", "query": "" 
}], + "queryType": "builder", + "unit": "" + }, + "selectedLogFields": [ + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "timestamp", "signal": "logs", "type": "log" }, + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "body", "signal": "logs", "type": "log" } + ], + "selectedTracesFields": [ + { "fieldContext": "resource", "fieldDataType": "string", "name": "service.name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "string", "name": "name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "duration_nano", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "http_method", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "response_status_code", "signal": "traces" } + ], + "showPoints": false, + "softMax": 0, + "softMin": 0, + "spanGaps": true, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Origin Latency", + "yAxisUnit": "ms" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "contextLinks": { "linksData": [] }, + "customLegendColors": {}, + "decimalPrecision": 2, + "description": "The average time taken by a WebSocket connection.", + "fillMode": "none", + "fillSpans": false, + "id": "w-avgwsconnectionduration", + "isLogScale": false, + "legendPosition": "bottom", + "lineInterpolation": "spline", + "lineStyle": "solid", + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregations": [ + { + "metricName": "azure_averagewebsocketconnectionduration_average", + "reduceTo": "avg", + "spaceAggregation": "avg", + "temporality": "", + "timeAggregation": "avg" + } + ], + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filter": { + "expression": "azuremonitor.subscription_id = 
$azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $cdnprofile AND type = 'Microsoft.Cdn/profiles'" + }, + "filters": { + "items": [ + { + "id": "f003-name", + "key": { "dataType": "string", "id": "name--string--", "key": "name", "type": "" }, + "op": "IN", + "value": "$cdnprofile" + }, + { + "id": "f003-sub", + "key": { "id": "azuremonitor.subscription_id", "key": "azuremonitor.subscription_id", "type": "" }, + "op": "=", + "value": "$azuremonitor.subscription_id" + }, + { + "id": "f003-rg", + "key": { "id": "resource_group", "key": "resource_group", "type": "" }, + "op": "=", + "value": "$resource_group" + }, + { + "id": "f003-type", + "key": { "id": "type", "key": "type", "type": "" }, + "op": "=", + "value": "Microsoft.Cdn/profiles" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [ + { + "dataType": "string", + "id": "name--string--tag", + "isColumn": false, + "isJSON": false, + "key": "name", + "type": "tag" + } + ], + "having": { "expression": "" }, + "legend": "{{name}}", + "limit": null, + "orderBy": [], + "queryName": "A", + "source": "", + "stepInterval": null + } + ], + "queryFormulas": [], + "queryTraceOperator": [] + }, + "clickhouse_sql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "id": "q-w-avgwsconnectionduration", + "promql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "queryType": "builder", + "unit": "" + }, + "selectedLogFields": [ + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "timestamp", "signal": "logs", "type": "log" }, + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "body", "signal": "logs", "type": "log" } + ], + "selectedTracesFields": [ + { "fieldContext": "resource", "fieldDataType": "string", "name": "service.name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "string", "name": "name", "signal": "traces" }, + { "fieldContext": "span", 
"fieldDataType": "", "name": "duration_nano", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "http_method", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "response_status_code", "signal": "traces" } + ], + "showPoints": false, + "softMax": 0, + "softMin": 0, + "spanGaps": true, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Avg WebSocket Connection Duration", + "yAxisUnit": "ms" + }, + { + "id": "row-originhealth", + "panelTypes": "row", + "title": "Origin Health" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "contextLinks": { "linksData": [] }, + "customLegendColors": {}, + "decimalPrecision": 2, + "description": "The percentage of successful health probes from AFDX to backends.", + "fillMode": "none", + "fillSpans": false, + "id": "w-originhealthpercentage", + "isLogScale": false, + "legendPosition": "bottom", + "lineInterpolation": "spline", + "lineStyle": "solid", + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregations": [ + { + "metricName": "azure_originhealthpercentage_average", + "reduceTo": "avg", + "spaceAggregation": "avg", + "temporality": "", + "timeAggregation": "avg" + } + ], + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filter": { + "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $cdnprofile AND type = 'Microsoft.Cdn/profiles'" + }, + "filters": { + "items": [ + { + "id": "f004-name", + "key": { "dataType": "string", "id": "name--string--", "key": "name", "type": "" }, + "op": "IN", + "value": "$cdnprofile" + }, + { + "id": "f004-sub", + "key": { "id": "azuremonitor.subscription_id", "key": "azuremonitor.subscription_id", "type": "" }, + "op": "=", + "value": "$azuremonitor.subscription_id" + }, + { 
+ "id": "f004-rg", + "key": { "id": "resource_group", "key": "resource_group", "type": "" }, + "op": "=", + "value": "$resource_group" + }, + { + "id": "f004-type", + "key": { "id": "type", "key": "type", "type": "" }, + "op": "=", + "value": "Microsoft.Cdn/profiles" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [ + { + "dataType": "string", + "id": "name--string--tag", + "isColumn": false, + "isJSON": false, + "key": "name", + "type": "tag" + } + ], + "having": { "expression": "" }, + "legend": "{{name}}", + "limit": null, + "orderBy": [], + "queryName": "A", + "source": "", + "stepInterval": null + } + ], + "queryFormulas": [], + "queryTraceOperator": [] + }, + "clickhouse_sql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "id": "q-w-originhealthpercentage", + "promql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "queryType": "builder", + "unit": "" + }, + "selectedLogFields": [ + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "timestamp", "signal": "logs", "type": "log" }, + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "body", "signal": "logs", "type": "log" } + ], + "selectedTracesFields": [ + { "fieldContext": "resource", "fieldDataType": "string", "name": "service.name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "string", "name": "name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "duration_nano", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "http_method", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "response_status_code", "signal": "traces" } + ], + "showPoints": false, + "softMax": 0, + "softMin": 0, + "spanGaps": true, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Origin Health Percentage", + "yAxisUnit": "%" + }, + { + "id": 
"row-requeststatus", + "panelTypes": "row", + "title": "Request Status" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "contextLinks": { "linksData": [] }, + "customLegendColors": {}, + "decimalPrecision": 2, + "description": "The ratio of the total bytes served from the cache compared to the total response bytes.", + "fillMode": "none", + "fillSpans": false, + "id": "w-bytehitratio", + "isLogScale": false, + "legendPosition": "bottom", + "lineInterpolation": "spline", + "lineStyle": "solid", + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregations": [ + { + "metricName": "azure_bytehitratio_average", + "reduceTo": "avg", + "spaceAggregation": "avg", + "temporality": "", + "timeAggregation": "avg" + } + ], + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filter": { + "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $cdnprofile AND type = 'Microsoft.Cdn/profiles'" + }, + "filters": { + "items": [ + { + "id": "f005-name", + "key": { "dataType": "string", "id": "name--string--", "key": "name", "type": "" }, + "op": "IN", + "value": "$cdnprofile" + }, + { + "id": "f005-sub", + "key": { "id": "azuremonitor.subscription_id", "key": "azuremonitor.subscription_id", "type": "" }, + "op": "=", + "value": "$azuremonitor.subscription_id" + }, + { + "id": "f005-rg", + "key": { "id": "resource_group", "key": "resource_group", "type": "" }, + "op": "=", + "value": "$resource_group" + }, + { + "id": "f005-type", + "key": { "id": "type", "key": "type", "type": "" }, + "op": "=", + "value": "Microsoft.Cdn/profiles" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [ + { + "dataType": "string", + "id": "name--string--tag", + "isColumn": false, + "isJSON": false, + "key": "name", + "type": "tag" + } + ], + "having": { "expression": "" }, + 
"legend": "{{name}}", + "limit": null, + "orderBy": [], + "queryName": "A", + "source": "", + "stepInterval": null + } + ], + "queryFormulas": [], + "queryTraceOperator": [] + }, + "clickhouse_sql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "id": "q-w-bytehitratio", + "promql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "queryType": "builder", + "unit": "" + }, + "selectedLogFields": [ + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "timestamp", "signal": "logs", "type": "log" }, + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "body", "signal": "logs", "type": "log" } + ], + "selectedTracesFields": [ + { "fieldContext": "resource", "fieldDataType": "string", "name": "service.name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "string", "name": "name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "duration_nano", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "http_method", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "response_status_code", "signal": "traces" } + ], + "showPoints": false, + "softMax": 0, + "softMin": 0, + "spanGaps": true, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Byte Hit Ratio", + "yAxisUnit": "%" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "contextLinks": { "linksData": [] }, + "customLegendColors": {}, + "decimalPrecision": 2, + "description": "The percentage of all client requests for which the response status code is 4XX.", + "fillMode": "none", + "fillSpans": false, + "id": "w-percentage4xx", + "isLogScale": false, + "legendPosition": "bottom", + "lineInterpolation": "spline", + "lineStyle": "solid", + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + 
"query": { + "builder": { + "queryData": [ + { + "aggregations": [ + { + "metricName": "azure_percentage4xx_average", + "reduceTo": "avg", + "spaceAggregation": "max", + "temporality": "", + "timeAggregation": "avg" + } + ], + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filter": { + "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $cdnprofile AND type = 'Microsoft.Cdn/profiles'" + }, + "filters": { + "items": [ + { + "id": "f006-name", + "key": { "dataType": "string", "id": "name--string--", "key": "name", "type": "" }, + "op": "IN", + "value": "$cdnprofile" + }, + { + "id": "f006-sub", + "key": { "id": "azuremonitor.subscription_id", "key": "azuremonitor.subscription_id", "type": "" }, + "op": "=", + "value": "$azuremonitor.subscription_id" + }, + { + "id": "f006-rg", + "key": { "id": "resource_group", "key": "resource_group", "type": "" }, + "op": "=", + "value": "$resource_group" + }, + { + "id": "f006-type", + "key": { "id": "type", "key": "type", "type": "" }, + "op": "=", + "value": "Microsoft.Cdn/profiles" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [ + { + "dataType": "string", + "id": "name--string--tag", + "isColumn": false, + "isJSON": false, + "key": "name", + "type": "tag" + } + ], + "having": { "expression": "" }, + "legend": "{{name}}", + "limit": null, + "orderBy": [], + "queryName": "A", + "source": "", + "stepInterval": null + } + ], + "queryFormulas": [], + "queryTraceOperator": [] + }, + "clickhouse_sql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "id": "q-w-percentage4xx", + "promql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "queryType": "builder", + "unit": "" + }, + "selectedLogFields": [ + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "timestamp", "signal": "logs", "type": "log" }, + { "dataType": "", "fieldContext": "log", 
"fieldDataType": "", "isIndexed": false, "name": "body", "signal": "logs", "type": "log" } + ], + "selectedTracesFields": [ + { "fieldContext": "resource", "fieldDataType": "string", "name": "service.name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "string", "name": "name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "duration_nano", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "http_method", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "response_status_code", "signal": "traces" } + ], + "showPoints": false, + "softMax": 0, + "softMin": 0, + "spanGaps": true, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "4XX Error Rate", + "yAxisUnit": "%" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "contextLinks": { "linksData": [] }, + "customLegendColors": {}, + "decimalPrecision": 2, + "description": "The percentage of all client requests for which the response status code is 5XX.", + "fillMode": "none", + "fillSpans": false, + "id": "w-percentage5xx", + "isLogScale": false, + "legendPosition": "bottom", + "lineInterpolation": "spline", + "lineStyle": "solid", + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregations": [ + { + "metricName": "azure_percentage5xx_average", + "reduceTo": "avg", + "spaceAggregation": "max", + "temporality": "", + "timeAggregation": "avg" + } + ], + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filter": { + "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $cdnprofile AND type = 'Microsoft.Cdn/profiles'" + }, + "filters": { + "items": [ + { + "id": "f007-name", + "key": { "dataType": "string", "id": "name--string--", "key": "name", "type": 
"" }, + "op": "IN", + "value": "$cdnprofile" + }, + { + "id": "f007-sub", + "key": { "id": "azuremonitor.subscription_id", "key": "azuremonitor.subscription_id", "type": "" }, + "op": "=", + "value": "$azuremonitor.subscription_id" + }, + { + "id": "f007-rg", + "key": { "id": "resource_group", "key": "resource_group", "type": "" }, + "op": "=", + "value": "$resource_group" + }, + { + "id": "f007-type", + "key": { "id": "type", "key": "type", "type": "" }, + "op": "=", + "value": "Microsoft.Cdn/profiles" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [ + { + "dataType": "string", + "id": "name--string--tag", + "isColumn": false, + "isJSON": false, + "key": "name", + "type": "tag" + } + ], + "having": { "expression": "" }, + "legend": "{{name}}", + "limit": null, + "orderBy": [], + "queryName": "A", + "source": "", + "stepInterval": null + } + ], + "queryFormulas": [], + "queryTraceOperator": [] + }, + "clickhouse_sql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "id": "q-w-percentage5xx", + "promql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "queryType": "builder", + "unit": "" + }, + "selectedLogFields": [ + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "timestamp", "signal": "logs", "type": "log" }, + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "body", "signal": "logs", "type": "log" } + ], + "selectedTracesFields": [ + { "fieldContext": "resource", "fieldDataType": "string", "name": "service.name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "string", "name": "name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "duration_nano", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "http_method", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "response_status_code", "signal": "traces" } + ], + 
"showPoints": false, + "softMax": 0, + "softMin": 0, + "spanGaps": true, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "5XX Error Rate", + "yAxisUnit": "%" + }, + { + "id": "row-traffic", + "panelTypes": "row", + "title": "Traffic" + }, + { + "bucketCount": 30, "bucketWidth": 0, "columnUnits": {}, "contextLinks": { "linksData": [] }, "customLegendColors": {}, + "decimalPrecision": 2, "description": "The number of client requests served by the HTTP/S proxy.", "fillMode": "none", "fillSpans": false, + "id": "w-requestcount", "isLogScale": false, "legendPosition": "bottom", "lineInterpolation": "spline", "lineStyle": "solid", + "mergeAllActiveQueries": false, "nullZeroValues": "zero", "opacity": "1", "panelTypes": "graph", + "query": { + "builder": { + "queryData": [{ + "aggregations": [{ "metricName": "azure_requestcount_total", "reduceTo": "sum", "spaceAggregation": "sum", "temporality": "", "timeAggregation": "sum" }], + "dataSource": "metrics", "disabled": false, "expression": "A", + "filter": { "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $cdnprofile AND type = 'Microsoft.Cdn/profiles'" }, + "filters": { "items": [ + { "id": "f008-name", "key": { "dataType": "string", "id": "name--string--", "key": "name", "type": "" }, "op": "IN", "value": "$cdnprofile" }, + { "id": "f008-sub", "key": { "id": "azuremonitor.subscription_id", "key": "azuremonitor.subscription_id", "type": "" }, "op": "=", "value": "$azuremonitor.subscription_id" }, + { "id": "f008-rg", "key": { "id": "resource_group", "key": "resource_group", "type": "" }, "op": "=", "value": "$resource_group" }, + { "id": "f008-type", "key": { "id": "type", "key": "type", "type": "" }, "op": "=", "value": "Microsoft.Cdn/profiles" } + ], "op": "AND" }, + "functions": [], "groupBy": [{ "dataType": "string", "id": "name--string--tag", "isColumn": false, "isJSON": false, "key": "name", 
"type": "tag" }], + "having": { "expression": "" }, "legend": "{{name}}", "limit": null, "orderBy": [], "queryName": "A", "source": "", "stepInterval": null + }], + "queryFormulas": [], "queryTraceOperator": [] + }, + "clickhouse_sql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "id": "q-w-requestcount", + "promql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "queryType": "builder", "unit": "" + }, + "selectedLogFields": [ + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "timestamp", "signal": "logs", "type": "log" }, + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "body", "signal": "logs", "type": "log" } + ], + "selectedTracesFields": [ + { "fieldContext": "resource", "fieldDataType": "string", "name": "service.name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "string", "name": "name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "duration_nano", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "http_method", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "response_status_code", "signal": "traces" } + ], + "showPoints": false, "softMax": 0, "softMin": 0, "spanGaps": true, "stackedBarChart": false, "thresholds": [], + "timePreferance": "GLOBAL_TIME", "title": "Request Count", "yAxisUnit": "{count}" + }, + { + "bucketCount": 30, "bucketWidth": 0, "columnUnits": {}, "contextLinks": { "linksData": [] }, "customLegendColors": {}, + "decimalPrecision": 2, "description": "The number of requests sent from AFDX to origin.", "fillMode": "none", "fillSpans": false, + "id": "w-originrequestcount", "isLogScale": false, "legendPosition": "bottom", "lineInterpolation": "spline", "lineStyle": "solid", + "mergeAllActiveQueries": false, "nullZeroValues": "zero", "opacity": "1", "panelTypes": "graph", + "query": { + "builder": { + 
"queryData": [{ + "aggregations": [{ "metricName": "azure_originrequestcount_total", "reduceTo": "sum", "spaceAggregation": "sum", "temporality": "", "timeAggregation": "sum" }], + "dataSource": "metrics", "disabled": false, "expression": "A", + "filter": { "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $cdnprofile AND type = 'Microsoft.Cdn/profiles'" }, + "filters": { "items": [ + { "id": "f009-name", "key": { "dataType": "string", "id": "name--string--", "key": "name", "type": "" }, "op": "IN", "value": "$cdnprofile" }, + { "id": "f009-sub", "key": { "id": "azuremonitor.subscription_id", "key": "azuremonitor.subscription_id", "type": "" }, "op": "=", "value": "$azuremonitor.subscription_id" }, + { "id": "f009-rg", "key": { "id": "resource_group", "key": "resource_group", "type": "" }, "op": "=", "value": "$resource_group" }, + { "id": "f009-type", "key": { "id": "type", "key": "type", "type": "" }, "op": "=", "value": "Microsoft.Cdn/profiles" } + ], "op": "AND" }, + "functions": [], "groupBy": [{ "dataType": "string", "id": "name--string--tag", "isColumn": false, "isJSON": false, "key": "name", "type": "tag" }], + "having": { "expression": "" }, "legend": "{{name}}", "limit": null, "orderBy": [], "queryName": "A", "source": "", "stepInterval": null + }], + "queryFormulas": [], "queryTraceOperator": [] + }, + "clickhouse_sql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "id": "q-w-originrequestcount", + "promql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "queryType": "builder", "unit": "" + }, + "selectedLogFields": [ + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "timestamp", "signal": "logs", "type": "log" }, + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "body", "signal": "logs", "type": "log" } + ], + "selectedTracesFields": [ + { "fieldContext": 
"resource", "fieldDataType": "string", "name": "service.name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "string", "name": "name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "duration_nano", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "http_method", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "response_status_code", "signal": "traces" } + ], + "showPoints": false, "softMax": 0, "softMin": 0, "spanGaps": true, "stackedBarChart": false, "thresholds": [], + "timePreferance": "GLOBAL_TIME", "title": "Origin Request Count", "yAxisUnit": "{count}" + }, + { + "bucketCount": 30, "bucketWidth": 0, "columnUnits": {}, "contextLinks": { "linksData": [] }, "customLegendColors": {}, + "decimalPrecision": 2, "description": "The number of bytes sent as requests from clients to AFDX.", "fillMode": "none", "fillSpans": false, + "id": "w-requestsize", "isLogScale": false, "legendPosition": "bottom", "lineInterpolation": "spline", "lineStyle": "solid", + "mergeAllActiveQueries": false, "nullZeroValues": "zero", "opacity": "1", "panelTypes": "graph", + "query": { + "builder": { + "queryData": [{ + "aggregations": [{ "metricName": "azure_requestsize_total", "reduceTo": "sum", "spaceAggregation": "sum", "temporality": "", "timeAggregation": "sum" }], + "dataSource": "metrics", "disabled": false, "expression": "A", + "filter": { "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $cdnprofile AND type = 'Microsoft.Cdn/profiles'" }, + "filters": { "items": [ + { "id": "f010-name", "key": { "dataType": "string", "id": "name--string--", "key": "name", "type": "" }, "op": "IN", "value": "$cdnprofile" }, + { "id": "f010-sub", "key": { "id": "azuremonitor.subscription_id", "key": "azuremonitor.subscription_id", "type": "" }, "op": "=", "value": "$azuremonitor.subscription_id" }, + { "id": 
"f010-rg", "key": { "id": "resource_group", "key": "resource_group", "type": "" }, "op": "=", "value": "$resource_group" }, + { "id": "f010-type", "key": { "id": "type", "key": "type", "type": "" }, "op": "=", "value": "Microsoft.Cdn/profiles" } + ], "op": "AND" }, + "functions": [], "groupBy": [{ "dataType": "string", "id": "name--string--tag", "isColumn": false, "isJSON": false, "key": "name", "type": "tag" }], + "having": { "expression": "" }, "legend": "{{name}}", "limit": null, "orderBy": [], "queryName": "A", "source": "", "stepInterval": null + }], + "queryFormulas": [], "queryTraceOperator": [] + }, + "clickhouse_sql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "id": "q-w-requestsize", + "promql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "queryType": "builder", "unit": "" + }, + "selectedLogFields": [ + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "timestamp", "signal": "logs", "type": "log" }, + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "body", "signal": "logs", "type": "log" } + ], + "selectedTracesFields": [ + { "fieldContext": "resource", "fieldDataType": "string", "name": "service.name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "string", "name": "name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "duration_nano", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "http_method", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "response_status_code", "signal": "traces" } + ], + "showPoints": false, "softMax": 0, "softMin": 0, "spanGaps": true, "stackedBarChart": false, "thresholds": [], + "timePreferance": "GLOBAL_TIME", "title": "Request Size", "yAxisUnit": "By" + }, + { + "bucketCount": 30, "bucketWidth": 0, "columnUnits": {}, "contextLinks": { "linksData": [] }, "customLegendColors": {}, + 
"decimalPrecision": 2, "description": "The number of bytes sent as responses from HTTP/S proxy to clients.", "fillMode": "none", "fillSpans": false, + "id": "w-responsesize", "isLogScale": false, "legendPosition": "bottom", "lineInterpolation": "spline", "lineStyle": "solid", + "mergeAllActiveQueries": false, "nullZeroValues": "zero", "opacity": "1", "panelTypes": "graph", + "query": { + "builder": { + "queryData": [{ + "aggregations": [{ "metricName": "azure_responsesize_total", "reduceTo": "sum", "spaceAggregation": "sum", "temporality": "", "timeAggregation": "sum" }], + "dataSource": "metrics", "disabled": false, "expression": "A", + "filter": { "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $cdnprofile AND type = 'Microsoft.Cdn/profiles'" }, + "filters": { "items": [ + { "id": "f011-name", "key": { "dataType": "string", "id": "name--string--", "key": "name", "type": "" }, "op": "IN", "value": "$cdnprofile" }, + { "id": "f011-sub", "key": { "id": "azuremonitor.subscription_id", "key": "azuremonitor.subscription_id", "type": "" }, "op": "=", "value": "$azuremonitor.subscription_id" }, + { "id": "f011-rg", "key": { "id": "resource_group", "key": "resource_group", "type": "" }, "op": "=", "value": "$resource_group" }, + { "id": "f011-type", "key": { "id": "type", "key": "type", "type": "" }, "op": "=", "value": "Microsoft.Cdn/profiles" } + ], "op": "AND" }, + "functions": [], "groupBy": [{ "dataType": "string", "id": "name--string--tag", "isColumn": false, "isJSON": false, "key": "name", "type": "tag" }], + "having": { "expression": "" }, "legend": "{{name}}", "limit": null, "orderBy": [], "queryName": "A", "source": "", "stepInterval": null + }], + "queryFormulas": [], "queryTraceOperator": [] + }, + "clickhouse_sql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "id": "q-w-responsesize", + "promql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + 
"queryType": "builder", "unit": "" + }, + "selectedLogFields": [ + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "timestamp", "signal": "logs", "type": "log" }, + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "body", "signal": "logs", "type": "log" } + ], + "selectedTracesFields": [ + { "fieldContext": "resource", "fieldDataType": "string", "name": "service.name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "string", "name": "name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "duration_nano", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "http_method", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "response_status_code", "signal": "traces" } + ], + "showPoints": false, "softMax": 0, "softMin": 0, "spanGaps": true, "stackedBarChart": false, "thresholds": [], + "timePreferance": "GLOBAL_TIME", "title": "Response Size", "yAxisUnit": "By" + }, + { + "bucketCount": 30, "bucketWidth": 0, "columnUnits": {}, "contextLinks": { "linksData": [] }, "customLegendColors": {}, + "decimalPrecision": 2, "description": "The number of active WebSocket connections.", "fillMode": "none", "fillSpans": false, + "id": "w-activewebsocketconnections", "isLogScale": false, "legendPosition": "bottom", "lineInterpolation": "spline", "lineStyle": "solid", + "mergeAllActiveQueries": false, "nullZeroValues": "zero", "opacity": "1", "panelTypes": "graph", + "query": { + "builder": { + "queryData": [{ + "aggregations": [{ "metricName": "azure_activewebsocketconnections_total", "reduceTo": "max", "spaceAggregation": "sum", "temporality": "", "timeAggregation": "max" }], + "dataSource": "metrics", "disabled": false, "expression": "A", + "filter": { "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $cdnprofile AND type 
= 'Microsoft.Cdn/profiles'" }, + "filters": { "items": [ + { "id": "f012-name", "key": { "dataType": "string", "id": "name--string--", "key": "name", "type": "" }, "op": "IN", "value": "$cdnprofile" }, + { "id": "f012-sub", "key": { "id": "azuremonitor.subscription_id", "key": "azuremonitor.subscription_id", "type": "" }, "op": "=", "value": "$azuremonitor.subscription_id" }, + { "id": "f012-rg", "key": { "id": "resource_group", "key": "resource_group", "type": "" }, "op": "=", "value": "$resource_group" }, + { "id": "f012-type", "key": { "id": "type", "key": "type", "type": "" }, "op": "=", "value": "Microsoft.Cdn/profiles" } + ], "op": "AND" }, + "functions": [], "groupBy": [{ "dataType": "string", "id": "name--string--tag", "isColumn": false, "isJSON": false, "key": "name", "type": "tag" }], + "having": { "expression": "" }, "legend": "{{name}}", "limit": null, "orderBy": [], "queryName": "A", "source": "", "stepInterval": null + }], + "queryFormulas": [], "queryTraceOperator": [] + }, + "clickhouse_sql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "id": "q-w-activewebsocketconnections", + "promql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "queryType": "builder", "unit": "" + }, + "selectedLogFields": [ + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "timestamp", "signal": "logs", "type": "log" }, + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "body", "signal": "logs", "type": "log" } + ], + "selectedTracesFields": [ + { "fieldContext": "resource", "fieldDataType": "string", "name": "service.name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "string", "name": "name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "duration_nano", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "http_method", "signal": "traces" }, + { "fieldContext": "span", 
"fieldDataType": "", "name": "response_status_code", "signal": "traces" } + ], + "showPoints": false, "softMax": 0, "softMin": 0, "spanGaps": true, "stackedBarChart": false, "thresholds": [], + "timePreferance": "GLOBAL_TIME", "title": "Active WebSocket Connections", "yAxisUnit": "{count}" + }, + { + "bucketCount": 30, "bucketWidth": 0, "columnUnits": {}, "contextLinks": { "linksData": [] }, "customLegendColors": {}, + "decimalPrecision": 2, "description": "The number of WebSocket connections requested.", "fillMode": "none", "fillSpans": false, + "id": "w-websocketconnections", "isLogScale": false, "legendPosition": "bottom", "lineInterpolation": "spline", "lineStyle": "solid", + "mergeAllActiveQueries": false, "nullZeroValues": "zero", "opacity": "1", "panelTypes": "graph", + "query": { + "builder": { + "queryData": [{ + "aggregations": [{ "metricName": "azure_websocketconnections_total", "reduceTo": "sum", "spaceAggregation": "sum", "temporality": "", "timeAggregation": "sum" }], + "dataSource": "metrics", "disabled": false, "expression": "A", + "filter": { "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $cdnprofile AND type = 'Microsoft.Cdn/profiles'" }, + "filters": { "items": [ + { "id": "f013-name", "key": { "dataType": "string", "id": "name--string--", "key": "name", "type": "" }, "op": "IN", "value": "$cdnprofile" }, + { "id": "f013-sub", "key": { "id": "azuremonitor.subscription_id", "key": "azuremonitor.subscription_id", "type": "" }, "op": "=", "value": "$azuremonitor.subscription_id" }, + { "id": "f013-rg", "key": { "id": "resource_group", "key": "resource_group", "type": "" }, "op": "=", "value": "$resource_group" }, + { "id": "f013-type", "key": { "id": "type", "key": "type", "type": "" }, "op": "=", "value": "Microsoft.Cdn/profiles" } + ], "op": "AND" }, + "functions": [], "groupBy": [{ "dataType": "string", "id": "name--string--tag", "isColumn": false, "isJSON": false, 
"key": "name", "type": "tag" }], + "having": { "expression": "" }, "legend": "{{name}}", "limit": null, "orderBy": [], "queryName": "A", "source": "", "stepInterval": null + }], + "queryFormulas": [], "queryTraceOperator": [] + }, + "clickhouse_sql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "id": "q-w-websocketconnections", + "promql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "queryType": "builder", "unit": "" + }, + "selectedLogFields": [ + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "timestamp", "signal": "logs", "type": "log" }, + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "body", "signal": "logs", "type": "log" } + ], + "selectedTracesFields": [ + { "fieldContext": "resource", "fieldDataType": "string", "name": "service.name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "string", "name": "name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "duration_nano", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "http_method", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "response_status_code", "signal": "traces" } + ], + "showPoints": false, "softMax": 0, "softMin": 0, "spanGaps": true, "stackedBarChart": false, "thresholds": [], + "timePreferance": "GLOBAL_TIME", "title": "WebSocket Connections", "yAxisUnit": "{count}" + }, + { + "bucketCount": 30, "bucketWidth": 0, "columnUnits": {}, "contextLinks": { "linksData": [] }, "customLegendColors": {}, + "decimalPrecision": 2, "description": "The number of client requests processed by the Web Application Firewall.", "fillMode": "none", "fillSpans": false, + "id": "w-wafrequestcount", "isLogScale": false, "legendPosition": "bottom", "lineInterpolation": "spline", "lineStyle": "solid", + "mergeAllActiveQueries": false, "nullZeroValues": "zero", "opacity": "1", 
"panelTypes": "graph", + "query": { + "builder": { + "queryData": [{ + "aggregations": [{ "metricName": "azure_webapplicationfirewallrequestcount_total", "reduceTo": "sum", "spaceAggregation": "sum", "temporality": "", "timeAggregation": "sum" }], + "dataSource": "metrics", "disabled": false, "expression": "A", + "filter": { "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $cdnprofile AND type = 'Microsoft.Cdn/profiles'" }, + "filters": { "items": [ + { "id": "f014-name", "key": { "dataType": "string", "id": "name--string--", "key": "name", "type": "" }, "op": "IN", "value": "$cdnprofile" }, + { "id": "f014-sub", "key": { "id": "azuremonitor.subscription_id", "key": "azuremonitor.subscription_id", "type": "" }, "op": "=", "value": "$azuremonitor.subscription_id" }, + { "id": "f014-rg", "key": { "id": "resource_group", "key": "resource_group", "type": "" }, "op": "=", "value": "$resource_group" }, + { "id": "f014-type", "key": { "id": "type", "key": "type", "type": "" }, "op": "=", "value": "Microsoft.Cdn/profiles" } + ], "op": "AND" }, + "functions": [], "groupBy": [{ "dataType": "string", "id": "name--string--tag", "isColumn": false, "isJSON": false, "key": "name", "type": "tag" }], + "having": { "expression": "" }, "legend": "{{name}}", "limit": null, "orderBy": [], "queryName": "A", "source": "", "stepInterval": null + }], + "queryFormulas": [], "queryTraceOperator": [] + }, + "clickhouse_sql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "id": "q-w-wafrequestcount", + "promql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "queryType": "builder", "unit": "" + }, + "selectedLogFields": [ + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "timestamp", "signal": "logs", "type": "log" }, + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "body", "signal": "logs", 
"type": "log" } + ], + "selectedTracesFields": [ + { "fieldContext": "resource", "fieldDataType": "string", "name": "service.name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "string", "name": "name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "duration_nano", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "http_method", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "response_status_code", "signal": "traces" } + ], + "showPoints": false, "softMax": 0, "softMin": 0, "spanGaps": true, "stackedBarChart": false, "thresholds": [], + "timePreferance": "GLOBAL_TIME", "title": "WAF Request Count", "yAxisUnit": "{count}" + }, + { + "bucketCount": 30, "bucketWidth": 0, "columnUnits": {}, "contextLinks": { "linksData": [] }, "customLegendColors": {}, + "decimalPrecision": 2, "description": "The number of CAPTCHA requests evaluated by the Web Application Firewall.", "fillMode": "none", "fillSpans": false, + "id": "w-wafcaptcharequestcount", "isLogScale": false, "legendPosition": "bottom", "lineInterpolation": "spline", "lineStyle": "solid", + "mergeAllActiveQueries": false, "nullZeroValues": "zero", "opacity": "1", "panelTypes": "graph", + "query": { + "builder": { + "queryData": [{ + "aggregations": [{ "metricName": "azure_webapplicationfirewallcaptcharequestcount_total", "reduceTo": "sum", "spaceAggregation": "sum", "temporality": "", "timeAggregation": "sum" }], + "dataSource": "metrics", "disabled": false, "expression": "A", + "filter": { "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $cdnprofile AND type = 'Microsoft.Cdn/profiles'" }, + "filters": { "items": [ + { "id": "f015-name", "key": { "dataType": "string", "id": "name--string--", "key": "name", "type": "" }, "op": "IN", "value": "$cdnprofile" }, + { "id": "f015-sub", "key": { "id": "azuremonitor.subscription_id", 
"key": "azuremonitor.subscription_id", "type": "" }, "op": "=", "value": "$azuremonitor.subscription_id" }, + { "id": "f015-rg", "key": { "id": "resource_group", "key": "resource_group", "type": "" }, "op": "=", "value": "$resource_group" }, + { "id": "f015-type", "key": { "id": "type", "key": "type", "type": "" }, "op": "=", "value": "Microsoft.Cdn/profiles" } + ], "op": "AND" }, + "functions": [], "groupBy": [{ "dataType": "string", "id": "name--string--tag", "isColumn": false, "isJSON": false, "key": "name", "type": "tag" }], + "having": { "expression": "" }, "legend": "{{name}}", "limit": null, "orderBy": [], "queryName": "A", "source": "", "stepInterval": null + }], + "queryFormulas": [], "queryTraceOperator": [] + }, + "clickhouse_sql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "id": "q-w-wafcaptcharequestcount", + "promql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "queryType": "builder", "unit": "" + }, + "selectedLogFields": [ + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "timestamp", "signal": "logs", "type": "log" }, + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "body", "signal": "logs", "type": "log" } + ], + "selectedTracesFields": [ + { "fieldContext": "resource", "fieldDataType": "string", "name": "service.name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "string", "name": "name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "duration_nano", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "http_method", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "response_status_code", "signal": "traces" } + ], + "showPoints": false, "softMax": 0, "softMin": 0, "spanGaps": true, "stackedBarChart": false, "thresholds": [], + "timePreferance": "GLOBAL_TIME", "title": "WAF CAPTCHA Request Count", 
"yAxisUnit": "{count}" + }, + { + "bucketCount": 30, "bucketWidth": 0, "columnUnits": {}, "contextLinks": { "linksData": [] }, "customLegendColors": {}, + "decimalPrecision": 2, "description": "The number of JS challenge requests evaluated by the Web Application Firewall.", "fillMode": "none", "fillSpans": false, + "id": "w-wafjsrequestcount", "isLogScale": false, "legendPosition": "bottom", "lineInterpolation": "spline", "lineStyle": "solid", + "mergeAllActiveQueries": false, "nullZeroValues": "zero", "opacity": "1", "panelTypes": "graph", + "query": { + "builder": { + "queryData": [{ + "aggregations": [{ "metricName": "azure_webapplicationfirewalljsrequestcount_total", "reduceTo": "sum", "spaceAggregation": "sum", "temporality": "", "timeAggregation": "sum" }], + "dataSource": "metrics", "disabled": false, "expression": "A", + "filter": { "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $cdnprofile AND type = 'Microsoft.Cdn/profiles'" }, + "filters": { "items": [ + { "id": "f016-name", "key": { "dataType": "string", "id": "name--string--", "key": "name", "type": "" }, "op": "IN", "value": "$cdnprofile" }, + { "id": "f016-sub", "key": { "id": "azuremonitor.subscription_id", "key": "azuremonitor.subscription_id", "type": "" }, "op": "=", "value": "$azuremonitor.subscription_id" }, + { "id": "f016-rg", "key": { "id": "resource_group", "key": "resource_group", "type": "" }, "op": "=", "value": "$resource_group" }, + { "id": "f016-type", "key": { "id": "type", "key": "type", "type": "" }, "op": "=", "value": "Microsoft.Cdn/profiles" } + ], "op": "AND" }, + "functions": [], "groupBy": [{ "dataType": "string", "id": "name--string--tag", "isColumn": false, "isJSON": false, "key": "name", "type": "tag" }], + "having": { "expression": "" }, "legend": "{{name}}", "limit": null, "orderBy": [], "queryName": "A", "source": "", "stepInterval": null + }], + "queryFormulas": [], "queryTraceOperator": [] + 
}, + "clickhouse_sql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "id": "q-w-wafjsrequestcount", + "promql": [{ "disabled": false, "legend": "", "name": "A", "query": "" }], + "queryType": "builder", "unit": "" + }, + "selectedLogFields": [ + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "timestamp", "signal": "logs", "type": "log" }, + { "dataType": "", "fieldContext": "log", "fieldDataType": "", "isIndexed": false, "name": "body", "signal": "logs", "type": "log" } + ], + "selectedTracesFields": [ + { "fieldContext": "resource", "fieldDataType": "string", "name": "service.name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "string", "name": "name", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "duration_nano", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "http_method", "signal": "traces" }, + { "fieldContext": "span", "fieldDataType": "", "name": "response_status_code", "signal": "traces" } + ], + "showPoints": false, "softMax": 0, "softMin": 0, "spanGaps": true, "stackedBarChart": false, "thresholds": [], + "timePreferance": "GLOBAL_TIME", "title": "WAF JS Challenge Request Count", "yAxisUnit": "{count}" + } + ] +} diff --git a/pkg/modules/cloudintegration/implcloudintegration/fs/definitions/azure/cdnprofile/integration.json b/pkg/modules/cloudintegration/implcloudintegration/fs/definitions/azure/cdnprofile/integration.json index 7692a77cd2..49a5f6a98c 100644 --- a/pkg/modules/cloudintegration/implcloudintegration/fs/definitions/azure/cdnprofile/integration.json +++ b/pkg/modules/cloudintegration/implcloudintegration/fs/definitions/azure/cdnprofile/integration.json @@ -134,6 +134,24 @@ "unit": "Count", "type": "Gauge", "description": "" + }, + { + "name": "azure_webapplicationfirewallcaptcharequestcount_total", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": 
"azure_webapplicationfirewalljsrequestcount_total", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "azure_websocketconnections_total", + "unit": "Count", + "type": "Gauge", + "description": "" } ], "logs": [ diff --git a/pkg/modules/cloudintegration/implcloudintegration/fs/definitions/azure/storageaccountsblob/assets/dashboards/overview.json b/pkg/modules/cloudintegration/implcloudintegration/fs/definitions/azure/storageaccountsblob/assets/dashboards/overview.json index 0967ef424b..c2e6ae2e14 100644 --- a/pkg/modules/cloudintegration/implcloudintegration/fs/definitions/azure/storageaccountsblob/assets/dashboards/overview.json +++ b/pkg/modules/cloudintegration/implcloudintegration/fs/definitions/azure/storageaccountsblob/assets/dashboards/overview.json @@ -1 +1,1986 @@ -{} +{ + "description": "Out of the box dashboard for Azure Blob Storage.", + "image": "data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxOCAxOCI+PGRlZnM+PGxpbmVhckdyYWRpZW50IGlkPSJhOWM2MjMwNy0xY2QwLTQwMGMtOTExYi0xN2VjNmE5MTEwY2UiIHgxPSI5IiB5MT0iMTUuODM0IiB4Mj0iOSIgeTI9IjUuNzg4IiBncmFkaWVudFVuaXRzPSJ1c2VyU3BhY2VPblVzZSI+PHN0b3Agb2Zmc2V0PSIwIiBzdG9wLWNvbG9yPSIjMzJiZWRkIi8+PHN0b3Agb2Zmc2V0PSIwLjc3NSIgc3RvcC1jb2xvcj0iIzMyZDRmNSIvPjwvbGluZWFyR3JhZGllbnQ+PC9kZWZzPjx0aXRsZT5Nc1BvcnRhbEZ4LmJhc2UuaW1hZ2VzLTc8L3RpdGxlPjxnIGlkPSJmMzFkMjE0ZS1mMDllLTQ5ZTMtYjNkMi03YzVkNTU2ODJkMDkiPjxnPjxwYXRoIGQ9Ik0uNSw1Ljc4OGgxN2EwLDAsMCwwLDEsMCwwdjkuNDc4YS41NjguNTY4LDAsMCwxLS41NjguNTY4SDEuMDY4QS41NjguNTY4LDAsMCwxLC41LDE1LjI2NlY1Ljc4OEEwLDAsMCwwLDEsLjUsNS43ODhaIiBmaWxsPSJ1cmwoI2E5YzYyMzA3LTFjZDAtNDAwYy05MTFiLTE3ZWM2YTkxMTBjZSkiLz48cGF0aCBkPSJNMS4wNzEsMi4xNjZIMTYuOTI5YS41NjguNTY4LDAsMCwxLC41NjguNTY4VjUuNzg4YTAsMCwwLDAsMSwwLDBILjVhMCwwLDAsMCwxLDAsMFYyLjczNEEuNTY4LjU2OCwwLDAsMSwxLjA3MSwyLjE2NloiIGZpbGw9IiMwMDc4ZDQiLz48cmVjdCB4PSIyLjMyOCIgeT0iNy4wNDkiIHdpZHRoPSI2LjI4MSIgaGVpZ2h0PSIzLjQwOCIgcng9IjAuMjgzIiBmaWxsPSIjMDA3OGQ0Ii8+PHJlY3QgeD0iOS4zMzYiIHk9IjcuMDQ5
IiB3aWR0aD0iNi4yODEiIGhlaWdodD0iMy40MDgiIHJ4PSIwLjI4MyIgZmlsbD0iI2ZmZiIvPjxyZWN0IHg9IjIuMjk2IiB5PSIxMS4xMjgiIHdpZHRoPSI2LjI4MSIgaGVpZ2h0PSIzLjQwOCIgcng9IjAuMjgzIiBmaWxsPSIjMDA3OGQ0Ii8+PHJlY3QgeD0iOS4zMDQiIHk9IjExLjEyOCIgd2lkdGg9IjYuMjgxIiBoZWlnaHQ9IjMuNDA4IiByeD0iMC4yODMiIGZpbGw9IiMwMDc4ZDQiLz48L2c+PC9nPjwvc3ZnPg==", + "layout": [ + { + "h": 1, + "i": "5905bfb8-2376-4385-8323-e02370c912d5", + "maxH": 1, + "minH": 1, + "minW": 12, + "moved": false, + "static": false, + "w": 12, + "x": 0, + "y": 0 + }, + { + "h": 6, + "i": "44c3d1ac-e374-4018-aecd-69817a4e6651", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 1 + }, + { + "h": 6, + "i": "aefb8124-eee8-467b-b8db-366d92d91096", + "moved": false, + "static": false, + "w": 6, + "x": 6, + "y": 1 + }, + { + "h": 6, + "i": "6b22d520-339e-4e1e-98d0-511f6036a4ff", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 7 + }, + { + "h": 1, + "i": "f07e3d7e-0f0f-49c9-a69c-24ea4d6c9556", + "maxH": 1, + "minH": 1, + "minW": 12, + "moved": false, + "static": false, + "w": 12, + "x": 0, + "y": 13 + }, + { + "h": 6, + "i": "1cced153-1d0d-4081-9d17-981fc241f005", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 14 + }, + { + "h": 6, + "i": "f98fafa7-b4e2-40bd-b2e6-ef2187eef16c", + "moved": false, + "static": false, + "w": 6, + "x": 6, + "y": 14 + }, + { + "h": 6, + "i": "2d4950b8-e9d8-4750-a312-320c954610a6", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 20 + }, + { + "h": 6, + "i": "abc1e732-4fa3-45b0-87ef-d115bc7abdb3", + "moved": false, + "static": false, + "w": 6, + "x": 6, + "y": 20 + }, + { + "h": 6, + "i": "ea31818a-d882-4a19-9bac-09fec3ec608e", + "w": 6, + "x": 0, + "y": 26 + }, + { + "h": 6, + "i": "b73c1da0-73b0-45e8-89c7-7308ffa472bf", + "w": 6, + "x": 6, + "y": 26 + } + ], + "panelMap": { + "5905bfb8-2376-4385-8323-e02370c912d5": { + "collapsed": false, + "widgets": [ + { + "h": 6, + "i": "44c3d1ac-e374-4018-aecd-69817a4e6651", + "moved": false, + "static": false, 
+ "w": 6, + "x": 0, + "y": 1 + }, + { + "h": 6, + "i": "aefb8124-eee8-467b-b8db-366d92d91096", + "moved": false, + "static": false, + "w": 6, + "x": 6, + "y": 1 + }, + { + "h": 6, + "i": "6b22d520-339e-4e1e-98d0-511f6036a4ff", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 7 + } + ] + }, + "f07e3d7e-0f0f-49c9-a69c-24ea4d6c9556": { + "collapsed": false, + "widgets": [ + { + "h": 6, + "i": "1cced153-1d0d-4081-9d17-981fc241f005", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 14 + }, + { + "h": 6, + "i": "f98fafa7-b4e2-40bd-b2e6-ef2187eef16c", + "moved": false, + "static": false, + "w": 6, + "x": 6, + "y": 14 + }, + { + "h": 6, + "i": "2d4950b8-e9d8-4750-a312-320c954610a6", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 20 + }, + { + "h": 6, + "i": "abc1e732-4fa3-45b0-87ef-d115bc7abdb3", + "moved": false, + "static": false, + "w": 6, + "x": 6, + "y": 20 + } + ] + } + }, + "tags": [], + "title": "Azure Blob Storage Overview", + "uploadedGrafana": false, + "variables": { + "26b4bfd5-5100-434a-9e36-b66d989d3830": { + "allSelected": false, + "customValue": "", + "defaultValue": "", + "description": "Name of the storage account", + "dynamicVariablesAttribute": "name", + "dynamicVariablesSource": "Metrics", + "id": "26b4bfd5-5100-434a-9e36-b66d989d3830", + "modificationUUID": "aef85a5f-4e3b-4550-bc80-2c9d6d913a01", + "multiSelect": false, + "name": "storageaccount", + "order": 2, + "queryValue": "", + "showALLOption": true, + "sort": "DISABLED", + "textboxValue": "", + "type": "DYNAMIC" + }, + "500d9daf-d3ab-4bec-bff7-2e716c389958": { + "allSelected": false, + "customValue": "", + "defaultValue": "", + "description": "Subscription ID for your Azure Account", + "dynamicVariablesAttribute": "azuremonitor.subscription_id", + "dynamicVariablesSource": "Metrics", + "id": "500d9daf-d3ab-4bec-bff7-2e716c389958", + "key": "500d9daf-d3ab-4bec-bff7-2e716c389958", + "modificationUUID": "d60e7dbb-5740-4d4a-8a65-9bbb1b8cf4aa", + 
"multiSelect": false, + "name": "azuremonitor.subscription_id", + "order": 0, + "queryValue": "", + "showALLOption": true, + "sort": "DISABLED", + "textboxValue": "", + "type": "DYNAMIC" + }, + "c41c1e8e-81c3-44f1-a563-1fbc50b19308": { + "allSelected": false, + "customValue": "", + "defaultValue": "", + "description": "Resource Group for your resources", + "dynamicVariablesAttribute": "resource_group", + "dynamicVariablesSource": "Metrics", + "id": "c41c1e8e-81c3-44f1-a563-1fbc50b19308", + "key": "c41c1e8e-81c3-44f1-a563-1fbc50b19308", + "modificationUUID": "71e5473a-381e-4ce0-84c3-470317636a55", + "multiSelect": false, + "name": "resource_group", + "order": 1, + "queryValue": "", + "showALLOption": true, + "sort": "DISABLED", + "textboxValue": "", + "type": "DYNAMIC" + } + }, + "version": "v5", + "widgets": [ + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "contextLinks": { + "linksData": [] + }, + "customLegendColors": {}, + "decimalPrecision": 2, + "description": "The amount of storage used by the storage account's Blob service in bytes.", + "fillMode": "none", + "fillSpans": false, + "id": "44c3d1ac-e374-4018-aecd-69817a4e6651", + "isLogScale": false, + "legendPosition": "bottom", + "lineInterpolation": "spline", + "lineStyle": "solid", + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregations": [ + { + "metricName": "azure_blobcapacity_average", + "reduceTo": "avg", + "spaceAggregation": "sum", + "temporality": "", + "timeAggregation": "avg" + } + ], + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filter": { + "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $storageaccount AND type = 'Microsoft.Storage/storageAccounts/blobServices'" + }, + "filters": { + "items": [ + { + "id": "8ad4d515-5e1e-440c-870b-05315f919297", + "key": { + 
"dataType": "string", + "id": "name--string--", + "key": "name", + "type": "" + }, + "op": "IN", + "value": "$storageaccount" + }, + { + "id": "e4fcdf44-7f3a-498a-a940-7e51c0ee0207", + "key": { + "id": "azuremonitor.subscription_id", + "key": "azuremonitor.subscription_id", + "type": "" + }, + "op": "=", + "value": "$azuremonitor.subscription_id" + }, + { + "id": "35a98ed2-e48d-4eb5-80e2-53452199c5a9", + "key": { + "id": "resource_group", + "key": "resource_group", + "type": "" + }, + "op": "=", + "value": "$resource_group" + }, + { + "id": "8c85d60a-985b-474d-b92a-0303fa02ba7d", + "key": { + "id": "type", + "key": "type", + "type": "" + }, + "op": "=", + "value": "Microsoft.Storage/storageAccounts/blobServices" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [ + { + "dataType": "string", + "id": "name--string--tag", + "isColumn": false, + "isJSON": false, + "key": "name", + "type": "tag" + } + ], + "having": { + "expression": "" + }, + "legend": "{{name}}", + "limit": null, + "orderBy": [], + "queryName": "A", + "source": "", + "stepInterval": null + } + ], + "queryFormulas": [], + "queryTraceOperator": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "d7f89445-ab8b-463e-8716-c96037c95abb", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder", + "unit": "" + }, + "selectedLogFields": [ + { + "dataType": "", + "fieldContext": "log", + "fieldDataType": "", + "isIndexed": false, + "name": "timestamp", + "signal": "logs", + "type": "log" + }, + { + "dataType": "", + "fieldContext": "log", + "fieldDataType": "", + "isIndexed": false, + "name": "body", + "signal": "logs", + "type": "log" + } + ], + "selectedTracesFields": [ + { + "fieldContext": "resource", + "fieldDataType": "string", + "name": "service.name", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "string", + "name": "name", + "signal": 
"traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "duration_nano", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "http_method", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "response_status_code", + "signal": "traces" + } + ], + "showPoints": false, + "softMax": 0, + "softMin": 0, + "spanGaps": true, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Blob Capacity", + "yAxisUnit": "By" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "contextLinks": { + "linksData": [] + }, + "customLegendColors": {}, + "decimalPrecision": 2, + "description": "The number of blob objects stored in the storage account.", + "fillMode": "none", + "fillSpans": false, + "id": "6b22d520-339e-4e1e-98d0-511f6036a4ff", + "isLogScale": false, + "legendPosition": "bottom", + "lineInterpolation": "spline", + "lineStyle": "solid", + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregations": [ + { + "metricName": "azure_blobcount_average", + "reduceTo": "avg", + "spaceAggregation": "avg", + "temporality": "", + "timeAggregation": "avg" + } + ], + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filter": { + "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $storageaccount AND type = 'Microsoft.Storage/storageAccounts/blobServices'" + }, + "filters": { + "items": [ + { + "id": "8ad4d515-5e1e-440c-870b-05315f919297", + "key": { + "dataType": "string", + "id": "name--string--", + "key": "name", + "type": "" + }, + "op": "IN", + "value": "$storageaccount" + }, + { + "id": "d14f31c6-ad28-43cc-91a3-f7c18231c7e6", + "key": { + "id": "azuremonitor.subscription_id", + "key": "azuremonitor.subscription_id", + "type": "" + }, 
+ "op": "=", + "value": "$azuremonitor.subscription_id" + }, + { + "id": "246f20e4-558c-4a69-9488-6fc3b0824264", + "key": { + "id": "resource_group", + "key": "resource_group", + "type": "" + }, + "op": "=", + "value": "$resource_group" + }, + { + "id": "a2a4fa45-7b95-4bd8-8737-1109215c489d", + "key": { + "id": "type", + "key": "type", + "type": "" + }, + "op": "=", + "value": "Microsoft.Storage/storageAccounts/blobServices" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [ + { + "dataType": "string", + "id": "name--string--tag", + "isColumn": false, + "isJSON": false, + "key": "name", + "type": "tag" + } + ], + "having": { + "expression": "" + }, + "legend": "{{name}}", + "limit": null, + "orderBy": [], + "queryName": "A", + "source": "", + "stepInterval": null + } + ], + "queryFormulas": [], + "queryTraceOperator": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "4fef30d3-4c91-404c-a8cb-402eb542d2f4", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder", + "unit": "" + }, + "selectedLogFields": [ + { + "dataType": "", + "fieldContext": "log", + "fieldDataType": "", + "isIndexed": false, + "name": "timestamp", + "signal": "logs", + "type": "log" + }, + { + "dataType": "", + "fieldContext": "log", + "fieldDataType": "", + "isIndexed": false, + "name": "body", + "signal": "logs", + "type": "log" + } + ], + "selectedTracesFields": [ + { + "fieldContext": "resource", + "fieldDataType": "string", + "name": "service.name", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "string", + "name": "name", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "duration_nano", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "http_method", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": 
"response_status_code", + "signal": "traces" + } + ], + "showPoints": false, + "softMax": 0, + "softMin": 0, + "spanGaps": true, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Blob Count", + "yAxisUnit": "none" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "contextLinks": { + "linksData": [] + }, + "customLegendColors": {}, + "decimalPrecision": 2, + "description": "The number of containers in the storage account.", + "fillMode": "none", + "fillSpans": false, + "id": "aefb8124-eee8-467b-b8db-366d92d91096", + "isLogScale": false, + "legendPosition": "bottom", + "lineInterpolation": "spline", + "lineStyle": "solid", + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregations": [ + { + "metricName": "azure_containercount_average", + "reduceTo": "avg", + "spaceAggregation": "avg", + "temporality": "", + "timeAggregation": "avg" + } + ], + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filter": { + "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $storageaccount AND type = 'Microsoft.Storage/storageAccounts/blobServices'" + }, + "filters": { + "items": [ + { + "id": "8ad4d515-5e1e-440c-870b-05315f919297", + "key": { + "dataType": "string", + "id": "name--string--", + "key": "name", + "type": "" + }, + "op": "IN", + "value": "$storageaccount" + }, + { + "id": "08a10d11-770a-4518-9eed-1cede51180cc", + "key": { + "id": "azuremonitor.subscription_id", + "key": "azuremonitor.subscription_id", + "type": "" + }, + "op": "=", + "value": "$azuremonitor.subscription_id" + }, + { + "id": "7a1fe360-f53f-486a-9581-8f975aa9dfd8", + "key": { + "id": "resource_group", + "key": "resource_group", + "type": "" + }, + "op": "=", + "value": "$resource_group" + }, + { + "id": 
"3864e736-3761-4b6b-8715-5dba6f9eae53", + "key": { + "id": "type", + "key": "type", + "type": "" + }, + "op": "=", + "value": "Microsoft.Storage/storageAccounts/blobServices" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [ + { + "dataType": "string", + "id": "name--string--tag", + "isColumn": false, + "isJSON": false, + "key": "name", + "type": "tag" + } + ], + "having": { + "expression": "" + }, + "legend": "{{name}}", + "limit": null, + "orderBy": [], + "queryName": "A", + "source": "", + "stepInterval": null + } + ], + "queryFormulas": [], + "queryTraceOperator": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "742f960e-e60a-4409-a897-73612116ca0b", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder", + "unit": "" + }, + "selectedLogFields": [ + { + "dataType": "", + "fieldContext": "log", + "fieldDataType": "", + "isIndexed": false, + "name": "timestamp", + "signal": "logs", + "type": "log" + }, + { + "dataType": "", + "fieldContext": "log", + "fieldDataType": "", + "isIndexed": false, + "name": "body", + "signal": "logs", + "type": "log" + } + ], + "selectedTracesFields": [ + { + "fieldContext": "resource", + "fieldDataType": "string", + "name": "service.name", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "string", + "name": "name", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "duration_nano", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "http_method", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "response_status_code", + "signal": "traces" + } + ], + "showPoints": false, + "softMax": 0, + "softMin": 0, + "spanGaps": true, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Blob Container Count", + "yAxisUnit": 
"{count}" + }, + { + "description": "", + "id": "f07e3d7e-0f0f-49c9-a69c-24ea4d6c9556", + "panelTypes": "row", + "title": "Transaction" + }, + { + "description": "", + "id": "5905bfb8-2376-4385-8323-e02370c912d5", + "panelTypes": "row", + "title": "Capacity" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "contextLinks": { + "linksData": [] + }, + "customLegendColors": {}, + "decimalPrecision": 2, + "description": "The percentage of availability for the storage service or the specified API operation.", + "fillMode": "none", + "fillSpans": false, + "id": "1cced153-1d0d-4081-9d17-981fc241f005", + "isLogScale": false, + "legendPosition": "bottom", + "lineInterpolation": "spline", + "lineStyle": "solid", + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregations": [ + { + "metricName": "azure_availability_average", + "reduceTo": "avg", + "spaceAggregation": "min", + "temporality": "", + "timeAggregation": "avg" + } + ], + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filter": { + "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $storageaccount AND type = 'Microsoft.Storage/storageAccounts/blobServices'" + }, + "filters": { + "items": [ + { + "id": "8ad4d515-5e1e-440c-870b-05315f919297", + "key": { + "dataType": "string", + "id": "name--string--", + "key": "name", + "type": "" + }, + "op": "IN", + "value": "$storageaccount" + }, + { + "id": "2e256002-761e-46da-ab9a-8dc0f33ac187", + "key": { + "id": "azuremonitor.subscription_id", + "key": "azuremonitor.subscription_id", + "type": "" + }, + "op": "=", + "value": "$azuremonitor.subscription_id" + }, + { + "id": "cb12f76b-e16e-4d92-aa88-d1bccf1fe9c0", + "key": { + "id": "resource_group", + "key": "resource_group", + "type": "" + }, + "op": "=", + "value": "$resource_group" + }, + { + "id": 
"fb865049-0636-4851-9da5-3c61ddff26a2", + "key": { + "id": "type", + "key": "type", + "type": "" + }, + "op": "=", + "value": "Microsoft.Storage/storageAccounts/blobServices" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [ + { + "dataType": "string", + "id": "name--string--tag", + "isColumn": false, + "isJSON": false, + "key": "name", + "type": "tag" + } + ], + "having": { + "expression": "" + }, + "legend": "{{name}}", + "limit": null, + "orderBy": [], + "queryName": "A", + "source": "", + "stepInterval": null + } + ], + "queryFormulas": [], + "queryTraceOperator": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "a818dcca-59c8-4aaf-ad98-a31bbf2cab95", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder", + "unit": "" + }, + "selectedLogFields": [ + { + "dataType": "", + "fieldContext": "log", + "fieldDataType": "", + "isIndexed": false, + "name": "timestamp", + "signal": "logs", + "type": "log" + }, + { + "dataType": "", + "fieldContext": "log", + "fieldDataType": "", + "isIndexed": false, + "name": "body", + "signal": "logs", + "type": "log" + } + ], + "selectedTracesFields": [ + { + "fieldContext": "resource", + "fieldDataType": "string", + "name": "service.name", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "string", + "name": "name", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "duration_nano", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "http_method", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "response_status_code", + "signal": "traces" + } + ], + "showPoints": false, + "softMax": 0, + "softMin": 0, + "spanGaps": true, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Availability", + "yAxisUnit": "%" + }, 
+ { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "contextLinks": { + "linksData": [] + }, + "customLegendColors": {}, + "decimalPrecision": 2, + "description": "The amount of egress data. This number includes egress to external client from Azure Storage as well as egress within Azure. As a result, this number does not reflect billable egress.", + "fillMode": "none", + "fillSpans": false, + "id": "f98fafa7-b4e2-40bd-b2e6-ef2187eef16c", + "isLogScale": false, + "legendPosition": "bottom", + "lineInterpolation": "spline", + "lineStyle": "solid", + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregations": [ + { + "metricName": "azure_egress_average", + "reduceTo": "sum", + "spaceAggregation": "sum", + "temporality": "", + "timeAggregation": "avg" + } + ], + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filter": { + "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $storageaccount AND type = 'Microsoft.Storage/storageAccounts/blobServices'" + }, + "filters": { + "items": [ + { + "id": "8ad4d515-5e1e-440c-870b-05315f919297", + "key": { + "dataType": "string", + "id": "name--string--", + "key": "name", + "type": "" + }, + "op": "IN", + "value": "$storageaccount" + }, + { + "id": "9716a7c9-ed42-4921-bad8-c5fb6b68fef2", + "key": { + "id": "azuremonitor.subscription_id", + "key": "azuremonitor.subscription_id", + "type": "" + }, + "op": "=", + "value": "$azuremonitor.subscription_id" + }, + { + "id": "2a7a68bf-9033-4494-b5c7-e82af92423e2", + "key": { + "id": "resource_group", + "key": "resource_group", + "type": "" + }, + "op": "=", + "value": "$resource_group" + }, + { + "id": "c9ea764d-b6af-4e93-b097-093ff562cc8e", + "key": { + "id": "type", + "key": "type", + "type": "" + }, + "op": "=", + "value": "Microsoft.Storage/storageAccounts/blobServices" 
+ } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [ + { + "dataType": "", + "id": "name----", + "key": "name", + "type": "" + } + ], + "having": { + "expression": "" + }, + "legend": "name = {{name}}", + "limit": null, + "orderBy": [], + "queryName": "A", + "source": "", + "stepInterval": null + } + ], + "queryFormulas": [], + "queryTraceOperator": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "aa8fb4f1-cf1c-4627-a5ef-bc8ebddc22f0", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder", + "unit": "" + }, + "selectedLogFields": [ + { + "dataType": "", + "fieldContext": "log", + "fieldDataType": "", + "isIndexed": false, + "name": "timestamp", + "signal": "logs", + "type": "log" + }, + { + "dataType": "", + "fieldContext": "log", + "fieldDataType": "", + "isIndexed": false, + "name": "body", + "signal": "logs", + "type": "log" + } + ], + "selectedTracesFields": [ + { + "fieldContext": "resource", + "fieldDataType": "string", + "name": "service.name", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "string", + "name": "name", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "duration_nano", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "http_method", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "response_status_code", + "signal": "traces" + } + ], + "showPoints": false, + "softMax": 0, + "softMin": 0, + "spanGaps": true, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Egress", + "yAxisUnit": "By" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "contextLinks": { + "linksData": [] + }, + "customLegendColors": {}, + "decimalPrecision": 2, + "description": "The amount of ingress data, in bytes. 
This number includes ingress from an external client into Azure Storage as well as ingress within Azure.", + "fillMode": "none", + "fillSpans": false, + "id": "2d4950b8-e9d8-4750-a312-320c954610a6", + "isLogScale": false, + "legendPosition": "bottom", + "lineInterpolation": "spline", + "lineStyle": "solid", + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregations": [ + { + "metricName": "azure_ingress_average", + "reduceTo": "sum", + "spaceAggregation": "sum", + "temporality": "", + "timeAggregation": "avg" + } + ], + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filter": { + "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $storageaccount AND type = 'Microsoft.Storage/storageAccounts/blobServices'" + }, + "filters": { + "items": [ + { + "id": "8ad4d515-5e1e-440c-870b-05315f919297", + "key": { + "dataType": "string", + "id": "name--string--", + "key": "name", + "type": "" + }, + "op": "IN", + "value": "$storageaccount" + }, + { + "id": "c5bb4e0b-f88a-4041-85f7-cbc07b5073bc", + "key": { + "id": "azuremonitor.subscription_id", + "key": "azuremonitor.subscription_id", + "type": "" + }, + "op": "=", + "value": "$azuremonitor.subscription_id" + }, + { + "id": "2b3de77d-2fde-465d-ba7e-9d7be8b6853a", + "key": { + "id": "resource_group", + "key": "resource_group", + "type": "" + }, + "op": "=", + "value": "$resource_group" + }, + { + "id": "adf659d5-6609-4720-98d0-4129321b63f7", + "key": { + "id": "type", + "key": "type", + "type": "" + }, + "op": "=", + "value": "Microsoft.Storage/storageAccounts/blobServices" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [ + { + "dataType": "", + "id": "name----", + "key": "name", + "type": "" + } + ], + "having": { + "expression": "" + }, + "legend": "{{name}}", + "limit": null, + "orderBy": [], + "queryName": 
"A", + "source": "", + "stepInterval": null + } + ], + "queryFormulas": [], + "queryTraceOperator": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "aa8fb4f1-cf1c-4627-a5ef-bc8ebddc22f0", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder", + "unit": "" + }, + "selectedLogFields": [ + { + "dataType": "", + "fieldContext": "log", + "fieldDataType": "", + "isIndexed": false, + "name": "timestamp", + "signal": "logs", + "type": "log" + }, + { + "dataType": "", + "fieldContext": "log", + "fieldDataType": "", + "isIndexed": false, + "name": "body", + "signal": "logs", + "type": "log" + } + ], + "selectedTracesFields": [ + { + "fieldContext": "resource", + "fieldDataType": "string", + "name": "service.name", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "string", + "name": "name", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "duration_nano", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "http_method", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "response_status_code", + "signal": "traces" + } + ], + "showPoints": false, + "softMax": 0, + "softMin": 0, + "spanGaps": true, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Ingress", + "yAxisUnit": "By" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "contextLinks": { + "linksData": [] + }, + "customLegendColors": {}, + "decimalPrecision": 2, + "description": "The average end-to-end latency of successful requests made to a storage service or the specified API operation, in milliseconds. 
This value includes the required processing time within Azure Storage to read the request, send the response, and receive acknowledgment of the response.", + "fillMode": "none", + "fillSpans": false, + "id": "abc1e732-4fa3-45b0-87ef-d115bc7abdb3", + "isLogScale": false, + "legendPosition": "bottom", + "lineInterpolation": "spline", + "lineStyle": "solid", + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregations": [ + { + "metricName": "azure_successe2elatency_average", + "reduceTo": "avg", + "spaceAggregation": "max", + "temporality": "", + "timeAggregation": "avg" + } + ], + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filter": { + "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $storageaccount AND type = 'Microsoft.Storage/storageAccounts/blobServices'" + }, + "filters": { + "items": [ + { + "id": "8ad4d515-5e1e-440c-870b-05315f919297", + "key": { + "dataType": "string", + "id": "name--string--", + "key": "name", + "type": "" + }, + "op": "IN", + "value": "$storageaccount" + }, + { + "id": "725192d6-cdfa-466f-87b3-960980c26bc4", + "key": { + "id": "azuremonitor.subscription_id", + "key": "azuremonitor.subscription_id", + "type": "" + }, + "op": "=", + "value": "$azuremonitor.subscription_id" + }, + { + "id": "301cb143-a872-4b17-b335-cb638d8d7022", + "key": { + "id": "resource_group", + "key": "resource_group", + "type": "" + }, + "op": "=", + "value": "$resource_group" + }, + { + "id": "22b7d4ca-56b5-4bfc-b123-0f4988a961d0", + "key": { + "id": "type", + "key": "type", + "type": "" + }, + "op": "=", + "value": "Microsoft.Storage/storageAccounts/blobServices" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [ + { + "dataType": "", + "id": "name----", + "key": "name", + "type": "" + } + ], + "having": { + "expression": "" + }, + "legend": 
"{{name}}", + "limit": null, + "orderBy": [], + "queryName": "A", + "source": "", + "stepInterval": null + } + ], + "queryFormulas": [], + "queryTraceOperator": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "aa8fb4f1-cf1c-4627-a5ef-bc8ebddc22f0", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder", + "unit": "" + }, + "selectedLogFields": [ + { + "dataType": "", + "fieldContext": "log", + "fieldDataType": "", + "isIndexed": false, + "name": "timestamp", + "signal": "logs", + "type": "log" + }, + { + "dataType": "", + "fieldContext": "log", + "fieldDataType": "", + "isIndexed": false, + "name": "body", + "signal": "logs", + "type": "log" + } + ], + "selectedTracesFields": [ + { + "fieldContext": "resource", + "fieldDataType": "string", + "name": "service.name", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "string", + "name": "name", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "duration_nano", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "http_method", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "response_status_code", + "signal": "traces" + } + ], + "showPoints": false, + "softMax": 0, + "softMin": 0, + "spanGaps": true, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Success E2E Latency", + "yAxisUnit": "ms" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "contextLinks": { + "linksData": [] + }, + "customLegendColors": {}, + "decimalPrecision": 2, + "description": "The average time used to process a successful request by Azure Storage. 
This value does not include the network latency specified in SuccessE2ELatency.", + "fillMode": "none", + "fillSpans": false, + "id": "ea31818a-d882-4a19-9bac-09fec3ec608e", + "isLogScale": false, + "legendPosition": "bottom", + "lineInterpolation": "spline", + "lineStyle": "solid", + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregations": [ + { + "metricName": "azure_successserverlatency_average", + "reduceTo": "avg", + "spaceAggregation": "max", + "temporality": "", + "timeAggregation": "avg" + } + ], + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filter": { + "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $storageaccount AND type = 'Microsoft.Storage/storageAccounts/blobServices'" + }, + "filters": { + "items": [ + { + "id": "8ad4d515-5e1e-440c-870b-05315f919297", + "key": { + "dataType": "string", + "id": "name--string--", + "key": "name", + "type": "" + }, + "op": "IN", + "value": "$storageaccount" + }, + { + "id": "cccb9c11-9e54-40e3-9b75-21a753f0651a", + "key": { + "id": "azuremonitor.subscription_id", + "key": "azuremonitor.subscription_id", + "type": "" + }, + "op": "=", + "value": "$azuremonitor.subscription_id" + }, + { + "id": "32b7a033-1da0-43da-83cf-0beb7fb6eb5d", + "key": { + "id": "resource_group", + "key": "resource_group", + "type": "" + }, + "op": "=", + "value": "$resource_group" + }, + { + "id": "77d3f853-9720-46a6-ac71-405ccd2d16e1", + "key": { + "id": "type", + "key": "type", + "type": "" + }, + "op": "=", + "value": "Microsoft.Storage/storageAccounts/blobServices" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [ + { + "dataType": "", + "id": "name----", + "key": "name", + "type": "" + } + ], + "having": { + "expression": "" + }, + "legend": "{{name}}", + "limit": null, + "orderBy": [], + "queryName": "A", + 
"source": "", + "stepInterval": null + } + ], + "queryFormulas": [], + "queryTraceOperator": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "aa8fb4f1-cf1c-4627-a5ef-bc8ebddc22f0", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder", + "unit": "" + }, + "selectedLogFields": [ + { + "dataType": "", + "fieldContext": "log", + "fieldDataType": "", + "isIndexed": false, + "name": "timestamp", + "signal": "logs", + "type": "log" + }, + { + "dataType": "", + "fieldContext": "log", + "fieldDataType": "", + "isIndexed": false, + "name": "body", + "signal": "logs", + "type": "log" + } + ], + "selectedTracesFields": [ + { + "fieldContext": "resource", + "fieldDataType": "string", + "name": "service.name", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "string", + "name": "name", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "duration_nano", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "http_method", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "response_status_code", + "signal": "traces" + } + ], + "showPoints": false, + "softMax": 0, + "softMin": 0, + "spanGaps": true, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Success Server Latency", + "yAxisUnit": "ms" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "contextLinks": { + "linksData": [] + }, + "customLegendColors": {}, + "decimalPrecision": 2, + "description": "The number of requests made to a storage service or the specified API operation. This number includes successful and failed requests, as well as requests which produced errors. 
Use ResponseType dimension for the number of different type of response.", + "fillMode": "none", + "fillSpans": false, + "id": "b73c1da0-73b0-45e8-89c7-7308ffa472bf", + "isLogScale": false, + "legendPosition": "bottom", + "lineInterpolation": "spline", + "lineStyle": "solid", + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregations": [ + { + "metricName": "azure_transactions_total", + "reduceTo": "sum", + "spaceAggregation": "sum", + "temporality": "", + "timeAggregation": "sum" + } + ], + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filter": { + "expression": "azuremonitor.subscription_id = $azuremonitor.subscription_id AND resource_group = $resource_group AND name IN $storageaccount AND type = 'Microsoft.Storage/storageAccounts/blobServices'" + }, + "filters": { + "items": [ + { + "id": "8ad4d515-5e1e-440c-870b-05315f919297", + "key": { + "dataType": "string", + "id": "name--string--", + "key": "name", + "type": "" + }, + "op": "IN", + "value": "$storageaccount" + }, + { + "id": "bbef20d9-4b62-49a2-b522-079f814406ca", + "key": { + "id": "azuremonitor.subscription_id", + "key": "azuremonitor.subscription_id", + "type": "" + }, + "op": "=", + "value": "$azuremonitor.subscription_id" + }, + { + "id": "ffbdf757-11b8-45d1-8190-50b4ec067839", + "key": { + "id": "resource_group", + "key": "resource_group", + "type": "" + }, + "op": "=", + "value": "$resource_group" + }, + { + "id": "65021cbd-add8-44c3-b5a3-16e1c582ccbb", + "key": { + "id": "type", + "key": "type", + "type": "" + }, + "op": "=", + "value": "Microsoft.Storage/storageAccounts/blobServices" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [ + { + "dataType": "", + "id": "name----", + "key": "name", + "type": "" + } + ], + "having": { + "expression": "" + }, + "legend": "{{name}}", + "limit": null, + "orderBy": [], + "queryName": "A", + "source": "", + 
"stepInterval": null + } + ], + "queryFormulas": [], + "queryTraceOperator": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "4eb41b96-d18f-4082-b03e-70606c7544f4", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder", + "unit": "" + }, + "selectedLogFields": [ + { + "dataType": "", + "fieldContext": "log", + "fieldDataType": "", + "isIndexed": false, + "name": "timestamp", + "signal": "logs", + "type": "log" + }, + { + "dataType": "", + "fieldContext": "log", + "fieldDataType": "", + "isIndexed": false, + "name": "body", + "signal": "logs", + "type": "log" + } + ], + "selectedTracesFields": [ + { + "fieldContext": "resource", + "fieldDataType": "string", + "name": "service.name", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "string", + "name": "name", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "duration_nano", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "http_method", + "signal": "traces" + }, + { + "fieldContext": "span", + "fieldDataType": "", + "name": "response_status_code", + "signal": "traces" + } + ], + "showPoints": false, + "softMax": 0, + "softMin": 0, + "spanGaps": true, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Transactions", + "yAxisUnit": "{count}" + } + ], + "uuid": "019dba4f-c877-7dba-8a33-4f3eebe06b00" +} From c230fbcbc243f4e5e550a1c2b74403b5b1aa90d7 Mon Sep 17 00:00:00 2001 From: Yunus M Date: Tue, 28 Apr 2026 23:22:27 +0530 Subject: [PATCH 13/19] fix: implement tag filter mapping to canonical and deprecated operators (#11124) Co-authored-by: Piyush Singariya --- .../PipelineListsView/PipelineListsView.tsx | 26 ++++- .../PipelinePage/PipelineListsView/utils.tsx | 110 ++++++++++++++++++ 2 files changed, 131 insertions(+), 5 deletions(-) diff 
--git a/frontend/src/container/PipelinePage/PipelineListsView/PipelineListsView.tsx b/frontend/src/container/PipelinePage/PipelineListsView/PipelineListsView.tsx index 0a4a19b98c..24265ae2ee 100644 --- a/frontend/src/container/PipelinePage/PipelineListsView/PipelineListsView.tsx +++ b/frontend/src/container/PipelinePage/PipelineListsView/PipelineListsView.tsx @@ -51,6 +51,8 @@ import { getElementFromArray, getRecordIndex, getTableColumn, + mapTagFilterToCanonicalOperators, + mapTagFilterToDeprecatedOperators, getUpdatedRow, } from './utils'; @@ -104,10 +106,16 @@ function PipelineListsView({ const { notifications } = useNotifications(); const [pipelineSearchValue, setPipelineSearchValue] = useState(''); const [prevPipelineData, setPrevPipelineData] = useState>( - cloneDeep(pipelineData?.pipelines || []), + cloneDeep(pipelineData?.pipelines || []).map((p) => ({ + ...p, + filter: mapTagFilterToCanonicalOperators(p.filter), + })), ); const [currPipelineData, setCurrPipelineData] = useState>( - cloneDeep(pipelineData?.pipelines || []), + cloneDeep(pipelineData?.pipelines || []).map((p) => ({ + ...p, + filter: mapTagFilterToCanonicalOperators(p.filter), + })), ); const [expandedPipelineId, setExpandedPipelineId] = useState< @@ -179,7 +187,10 @@ function PipelineListsView({ (record: PipelineData) => (): void => { setActionType(ActionType.EditPipeline); setSelectedPipelineData(record); - pipelineForm.setFieldsValue(record); + pipelineForm.setFieldsValue({ + ...record, + filter: mapTagFilterToCanonicalOperators(record.filter), + }); }, [setActionType, pipelineForm], ); @@ -438,6 +449,7 @@ function PipelineListsView({ const modifiedPipelineData = currPipelineData.map((item: PipelineData) => { const pipelineData = { ...item }; delete pipelineData?.id; + pipelineData.filter = mapTagFilterToDeprecatedOperators(pipelineData.filter); return pipelineData; }); try { @@ -449,8 +461,12 @@ function PipelineListsView({ setShowSaveButton(undefined); const pipelinesInDB = 
response.data?.pipelines || []; - setCurrPipelineData(pipelinesInDB); - setPrevPipelineData(pipelinesInDB); + const canonicalPipelinesInDB = pipelinesInDB.map((p) => ({ + ...p, + filter: mapTagFilterToCanonicalOperators(p.filter), + })); + setCurrPipelineData(canonicalPipelinesInDB); + setPrevPipelineData(canonicalPipelinesInDB); // Log modified JSON flattening configurations const modifiedConfigs = getModifiedJsonFlatteningConfigs(); diff --git a/frontend/src/container/PipelinePage/PipelineListsView/utils.tsx b/frontend/src/container/PipelinePage/PipelineListsView/utils.tsx index 1ceb134787..18b6cbcb85 100644 --- a/frontend/src/container/PipelinePage/PipelineListsView/utils.tsx +++ b/frontend/src/container/PipelinePage/PipelineListsView/utils.tsx @@ -1,12 +1,122 @@ import update from 'react-addons-update'; import { TableColumnType as ColumnType } from 'antd'; +import { DEPRECATED_OPERATORS } from 'constants/antlrQueryConstants'; import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats'; import dayjs from 'dayjs'; import { cloneDeep } from 'lodash-es'; +import { TagFilter } from 'types/api/queryBuilder/queryBuilderData'; import { ProcessorData } from 'types/api/pipeline/def'; import TableComponents, { Record } from './TableComponents'; +function toCanonicalFilterOperator(op: string): string { + const normalized = op.trim(); + if (!normalized) { + return normalized; + } + + const lower = normalized.toLowerCase(); + + // Deprecated → canonical (UI/operator-picker format) + switch (lower) { + case DEPRECATED_OPERATORS.NIN: + return 'not in'; + case DEPRECATED_OPERATORS.NREGEX: + return 'not regex'; + case DEPRECATED_OPERATORS.NLIKE: + return 'not like'; + case DEPRECATED_OPERATORS.NILIKE: + return 'not ilike'; + case DEPRECATED_OPERATORS.NEXTISTS: + return 'not exists'; + case DEPRECATED_OPERATORS.NCONTAINS: + return 'not contains'; + case DEPRECATED_OPERATORS.NHAS: + return 'not has'; + case DEPRECATED_OPERATORS.NHASANY: + return 'not hasAny'; + case 
DEPRECATED_OPERATORS.NHASALL: + return 'not hasAll'; + case DEPRECATED_OPERATORS.REGEX: + return 'regex'; + default: + return normalized; + } +} + +export function mapTagFilterToCanonicalOperators(filter: TagFilter): TagFilter { + if (!filter?.items?.length) { + return filter; + } + + let changed = false; + const items = filter.items.map((item) => { + const nextOp = toCanonicalFilterOperator(item.op || ''); + if (nextOp !== item.op) { + changed = true; + return { ...item, op: nextOp }; + } + return item; + }); + + return changed ? { ...filter, items } : filter; +} + +function toDeprecatedFilterOperator(op: string): string { + const normalized = op.trim(); + if (!normalized) { + return normalized; + } + + const lower = normalized.toLowerCase(); + + // Canonical → deprecated (only where a deprecated token exists) + switch (lower) { + case 'not in': + return DEPRECATED_OPERATORS.NIN; + case 'not regex': + return DEPRECATED_OPERATORS.NREGEX; + case 'not like': + return DEPRECATED_OPERATORS.NLIKE; + case 'not ilike': + return DEPRECATED_OPERATORS.NILIKE; + case 'not exists': + return DEPRECATED_OPERATORS.NEXTISTS; + case 'not contains': + return DEPRECATED_OPERATORS.NCONTAINS; + case 'not has': + return DEPRECATED_OPERATORS.NHAS; + case 'not hasany': + return DEPRECATED_OPERATORS.NHASANY; + case 'not hasall': + return DEPRECATED_OPERATORS.NHASALL; + case 'regex': + return DEPRECATED_OPERATORS.REGEX; + default: + return normalized; + } +} + +export function mapTagFilterToDeprecatedOperators( + filter: TagFilter, +): TagFilter { + if (!filter?.items?.length) { + return filter; + } + + let changed = false; + const items = filter.items.map((item) => { + const nextOp = toDeprecatedFilterOperator(item.op || ''); + if (nextOp !== item.op) { + changed = true; + return { ...item, op: nextOp }; + } + return item; + }); + + return changed ? 
{ ...filter, items } : filter; +} + export function getElementFromArray( arr: Array, target: T, From 705ff439c486809e53a93a71da5ae8df4251e457 Mon Sep 17 00:00:00 2001 From: Prakhar Dewan <33227141+prakha@users.noreply.github.com> Date: Wed, 29 Apr 2026 02:07:55 +0530 Subject: [PATCH 14/19] chore: remove unused api files (#11041) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Vinicius Lourenço <12551007+H4ad@users.noreply.github.com> --- .../src/api/dashboard/queryRangeFormat.ts | 15 ------- .../api/dynamicConfigs/getDynamicConfigs.ts | 24 ---------- .../src/api/infra/getHostAttributeKeys.ts | 44 ------------------- .../getInfraAttributeValues.ts | 40 ----------------- frontend/src/api/metrics/getDBOverView.ts | 26 ----------- .../api/metrics/getExternalAverageDuration.ts | 29 ------------ frontend/src/api/metrics/getExternalError.ts | 26 ----------- .../src/api/metrics/getExternalService.ts | 26 ----------- .../src/api/metrics/getServiceOverview.ts | 16 ------- frontend/src/api/trace/getTraceItem.ts | 32 -------------- .../src/api/v1/org/preferences/name/get.ts | 25 ----------- frontend/src/api/widgets/getQuery.ts | 26 ----------- 12 files changed, 329 deletions(-) delete mode 100644 frontend/src/api/dashboard/queryRangeFormat.ts delete mode 100644 frontend/src/api/dynamicConfigs/getDynamicConfigs.ts delete mode 100644 frontend/src/api/infra/getHostAttributeKeys.ts delete mode 100644 frontend/src/api/infraMonitoring/getInfraAttributeValues.ts delete mode 100644 frontend/src/api/metrics/getDBOverView.ts delete mode 100644 frontend/src/api/metrics/getExternalAverageDuration.ts delete mode 100644 frontend/src/api/metrics/getExternalError.ts delete mode 100644 frontend/src/api/metrics/getExternalService.ts delete mode 100644 frontend/src/api/metrics/getServiceOverview.ts delete mode 100644 frontend/src/api/trace/getTraceItem.ts delete mode 100644 frontend/src/api/v1/org/preferences/name/get.ts delete mode 
100644 frontend/src/api/widgets/getQuery.ts diff --git a/frontend/src/api/dashboard/queryRangeFormat.ts b/frontend/src/api/dashboard/queryRangeFormat.ts deleted file mode 100644 index 02e020bfb5..0000000000 --- a/frontend/src/api/dashboard/queryRangeFormat.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { ApiV3Instance as axios } from 'api'; -import { ApiResponse } from 'types/api'; -import { ICompositeMetricQuery } from 'types/api/alerts/compositeQuery'; -import { QueryRangePayload } from 'types/api/metrics/getQueryRange'; - -interface IQueryRangeFormat { - compositeQuery: ICompositeMetricQuery; -} - -export const getQueryRangeFormat = ( - props?: Partial, -): Promise => - axios - .post>('/query_range/format', props) - .then((res) => res.data.data); diff --git a/frontend/src/api/dynamicConfigs/getDynamicConfigs.ts b/frontend/src/api/dynamicConfigs/getDynamicConfigs.ts deleted file mode 100644 index 149c113119..0000000000 --- a/frontend/src/api/dynamicConfigs/getDynamicConfigs.ts +++ /dev/null @@ -1,24 +0,0 @@ -import axios from 'api'; -import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; -import { AxiosError } from 'axios'; -import { ErrorResponse, SuccessResponse } from 'types/api'; -import { PayloadProps } from 'types/api/dynamicConfigs/getDynamicConfigs'; - -const getDynamicConfigs = async (): Promise< - SuccessResponse | ErrorResponse -> => { - try { - const response = await axios.get(`/configs`); - - return { - statusCode: 200, - error: null, - message: response.data.status, - payload: response.data.data, - }; - } catch (error) { - return ErrorResponseHandler(error as AxiosError); - } -}; - -export default getDynamicConfigs; diff --git a/frontend/src/api/infra/getHostAttributeKeys.ts b/frontend/src/api/infra/getHostAttributeKeys.ts deleted file mode 100644 index de66be23d1..0000000000 --- a/frontend/src/api/infra/getHostAttributeKeys.ts +++ /dev/null @@ -1,44 +0,0 @@ -import axios from 'api'; -import { ErrorResponseHandler } from 
'api/ErrorResponseHandler'; -import { AxiosError, AxiosResponse } from 'axios'; -import { baseAutoCompleteIdKeysOrder } from 'constants/queryBuilder'; -import { InfraMonitoringEntity } from 'container/InfraMonitoringK8s/constants'; -import { createIdFromObjectFields } from 'lib/createIdFromObjectFields'; -import { ErrorResponse, SuccessResponse } from 'types/api'; -import { - BaseAutocompleteData, - IQueryAutocompleteResponse, -} from 'types/api/queryBuilder/queryAutocompleteResponse'; - -export const getHostAttributeKeys = async ( - searchText = '', - entity: InfraMonitoringEntity, -): Promise | ErrorResponse> => { - try { - const response: AxiosResponse<{ - data: IQueryAutocompleteResponse; - }> = await axios.get( - `/${entity}/attribute_keys?dataSource=metrics&searchText=${searchText}`, - { - params: { - limit: 500, - }, - }, - ); - - const payload: BaseAutocompleteData[] = - response.data.data.attributeKeys?.map(({ id: _, ...item }) => ({ - ...item, - id: createIdFromObjectFields(item, baseAutoCompleteIdKeysOrder), - })) || []; - - return { - statusCode: 200, - error: null, - message: response.statusText, - payload: { attributeKeys: payload }, - }; - } catch (e) { - return ErrorResponseHandler(e as AxiosError); - } -}; diff --git a/frontend/src/api/infraMonitoring/getInfraAttributeValues.ts b/frontend/src/api/infraMonitoring/getInfraAttributeValues.ts deleted file mode 100644 index 0e6b17ce11..0000000000 --- a/frontend/src/api/infraMonitoring/getInfraAttributeValues.ts +++ /dev/null @@ -1,40 +0,0 @@ -import axios from 'api'; -import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; -import { AxiosError } from 'axios'; -import createQueryParams from 'lib/createQueryParams'; -import { ErrorResponse, SuccessResponse } from 'types/api'; -import { - IAttributeValuesResponse, - IGetAttributeValuesPayload, -} from 'types/api/queryBuilder/getAttributesValues'; - -export const getInfraAttributesValues = async ({ - dataSource, - attributeKey, - 
filterAttributeKeyDataType, - tagType, - searchText, - aggregateAttribute, -}: IGetAttributeValuesPayload): Promise< - SuccessResponse | ErrorResponse -> => { - try { - const response = await axios.get( - `/hosts/attribute_values?${createQueryParams({ - dataSource, - attributeKey, - searchText, - aggregateAttribute, - })}&filterAttributeKeyDataType=${filterAttributeKeyDataType}&tagType=${tagType}`, - ); - - return { - statusCode: 200, - error: null, - message: response.data.status, - payload: response.data.data, - }; - } catch (error) { - return ErrorResponseHandler(error as AxiosError); - } -}; diff --git a/frontend/src/api/metrics/getDBOverView.ts b/frontend/src/api/metrics/getDBOverView.ts deleted file mode 100644 index 7afd56d75d..0000000000 --- a/frontend/src/api/metrics/getDBOverView.ts +++ /dev/null @@ -1,26 +0,0 @@ -import axios from 'api'; -import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; -import { AxiosError } from 'axios'; -import { ErrorResponse, SuccessResponse } from 'types/api'; -import { PayloadProps, Props } from 'types/api/metrics/getDBOverview'; - -const getDBOverView = async ( - props: Props, -): Promise | ErrorResponse> => { - try { - const response = await axios.get( - `/service/dbOverview?&start=${props.start}&end=${props.end}&service=${props.service}&step=${props.step}`, - ); - - return { - statusCode: 200, - error: null, - message: response.data.status, - payload: response.data, - }; - } catch (error) { - return ErrorResponseHandler(error as AxiosError); - } -}; - -export default getDBOverView; diff --git a/frontend/src/api/metrics/getExternalAverageDuration.ts b/frontend/src/api/metrics/getExternalAverageDuration.ts deleted file mode 100644 index 51be375d94..0000000000 --- a/frontend/src/api/metrics/getExternalAverageDuration.ts +++ /dev/null @@ -1,29 +0,0 @@ -import axios from 'api'; -import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; -import { AxiosError } from 'axios'; -import { ErrorResponse, 
SuccessResponse } from 'types/api'; -import { - PayloadProps, - Props, -} from 'types/api/metrics/getExternalAverageDuration'; - -const getExternalAverageDuration = async ( - props: Props, -): Promise | ErrorResponse> => { - try { - const response = await axios.get( - `/service/externalAvgDuration?&start=${props.start}&end=${props.end}&service=${props.service}&step=${props.step}`, - ); - - return { - statusCode: 200, - error: null, - message: response.data.status, - payload: response.data, - }; - } catch (error) { - return ErrorResponseHandler(error as AxiosError); - } -}; - -export default getExternalAverageDuration; diff --git a/frontend/src/api/metrics/getExternalError.ts b/frontend/src/api/metrics/getExternalError.ts deleted file mode 100644 index 3587639bb9..0000000000 --- a/frontend/src/api/metrics/getExternalError.ts +++ /dev/null @@ -1,26 +0,0 @@ -import axios from 'api'; -import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; -import { AxiosError } from 'axios'; -import { ErrorResponse, SuccessResponse } from 'types/api'; -import { PayloadProps, Props } from 'types/api/metrics/getExternalError'; - -const getExternalError = async ( - props: Props, -): Promise | ErrorResponse> => { - try { - const response = await axios.get( - `/service/externalErrors?&start=${props.start}&end=${props.end}&service=${props.service}&step=${props.step}`, - ); - - return { - statusCode: 200, - error: null, - message: response.data.status, - payload: response.data, - }; - } catch (error) { - return ErrorResponseHandler(error as AxiosError); - } -}; - -export default getExternalError; diff --git a/frontend/src/api/metrics/getExternalService.ts b/frontend/src/api/metrics/getExternalService.ts deleted file mode 100644 index de9bf65173..0000000000 --- a/frontend/src/api/metrics/getExternalService.ts +++ /dev/null @@ -1,26 +0,0 @@ -import axios from 'api'; -import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; -import { AxiosError } from 'axios'; -import { 
ErrorResponse, SuccessResponse } from 'types/api'; -import { PayloadProps, Props } from 'types/api/metrics/getExternalService'; - -const getExternalService = async ( - props: Props, -): Promise | ErrorResponse> => { - try { - const response = await axios.get( - `/service/external?&start=${props.start}&end=${props.end}&service=${props.service}&step=${props.step}`, - ); - - return { - statusCode: 200, - error: null, - message: response.data.status, - payload: response.data, - }; - } catch (error) { - return ErrorResponseHandler(error as AxiosError); - } -}; - -export default getExternalService; diff --git a/frontend/src/api/metrics/getServiceOverview.ts b/frontend/src/api/metrics/getServiceOverview.ts deleted file mode 100644 index 47febaa3d8..0000000000 --- a/frontend/src/api/metrics/getServiceOverview.ts +++ /dev/null @@ -1,16 +0,0 @@ -import axios from 'api'; -import { PayloadProps, Props } from 'types/api/metrics/getServiceOverview'; - -const getServiceOverview = async (props: Props): Promise => { - const response = await axios.post(`/service/overview`, { - start: `${props.start}`, - end: `${props.end}`, - service: props.service, - step: props.step, - tags: props.selectedTags, - }); - - return response.data; -}; - -export default getServiceOverview; diff --git a/frontend/src/api/trace/getTraceItem.ts b/frontend/src/api/trace/getTraceItem.ts deleted file mode 100644 index 054c809b33..0000000000 --- a/frontend/src/api/trace/getTraceItem.ts +++ /dev/null @@ -1,32 +0,0 @@ -import axios from 'api'; -import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; -import { AxiosError } from 'axios'; -import { formUrlParams } from 'container/TraceDetail/utils'; -import { ErrorResponse, SuccessResponse } from 'types/api'; -import { GetTraceItemProps, PayloadProps } from 'types/api/trace/getTraceItem'; - -const getTraceItem = async ( - props: GetTraceItemProps, -): Promise | ErrorResponse> => { - try { - const response = await axios.request({ - url: 
`/traces/${props.id}${formUrlParams({ - spanId: props.spanId, - levelUp: props.levelUp, - levelDown: props.levelDown, - })}`, - method: 'get', - }); - - return { - statusCode: 200, - error: null, - message: 'Success', - payload: response.data, - }; - } catch (error) { - return ErrorResponseHandler(error as AxiosError); - } -}; - -export default getTraceItem; diff --git a/frontend/src/api/v1/org/preferences/name/get.ts b/frontend/src/api/v1/org/preferences/name/get.ts deleted file mode 100644 index 48e746d2a1..0000000000 --- a/frontend/src/api/v1/org/preferences/name/get.ts +++ /dev/null @@ -1,25 +0,0 @@ -import axios from 'api'; -import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; -import { AxiosError } from 'axios'; -import { ErrorV2Resp, SuccessResponseV2 } from 'types/api'; -import { PayloadProps, Props } from 'types/api/preferences/get'; -import { OrgPreference } from 'types/api/preferences/preference'; - -const getPreference = async ( - props: Props, -): Promise> => { - try { - const response = await axios.get( - `/org/preferences/${props.name}`, - ); - - return { - httpStatusCode: response.status, - data: response.data.data, - }; - } catch (error) { - ErrorResponseHandlerV2(error as AxiosError); - } -}; - -export default getPreference; diff --git a/frontend/src/api/widgets/getQuery.ts b/frontend/src/api/widgets/getQuery.ts deleted file mode 100644 index a706db3b77..0000000000 --- a/frontend/src/api/widgets/getQuery.ts +++ /dev/null @@ -1,26 +0,0 @@ -import axios from 'api'; -import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; -import { AxiosError } from 'axios'; -import { ErrorResponse, SuccessResponse } from 'types/api'; -import { PayloadProps, Props } from 'types/api/widgets/getQuery'; - -const getQuery = async ( - props: Props, -): Promise | ErrorResponse> => { - try { - const response = await axios.get( - `/query_range?query=${props.query}&start=${props.start}&end=${props.end}&step=${props.step}`, - ); - - return { - 
statusCode: 200, - error: null, - message: response.data.status, - payload: response.data.data, - }; - } catch (error) { - return ErrorResponseHandler(error as AxiosError); - } -}; - -export default getQuery; From 4df96ae5ddd9c25c5ba004d25dfe756f8c76b878 Mon Sep 17 00:00:00 2001 From: Prakhar Dewan <33227141+prakha@users.noreply.github.com> Date: Wed, 29 Apr 2026 02:36:56 +0530 Subject: [PATCH 15/19] chore: remove unused assets files (#11045) --- .../src/assets/CustomIcons/GrafanaIcon.tsx | 18 - .../src/assets/CustomIcons/JuiceBoxIcon.tsx | 82 --- .../src/assets/CustomIcons/MagicBallIcon.tsx | 38 -- frontend/src/assets/CustomIcons/TentIcon.tsx | 110 ---- frontend/src/assets/Dashboard/BarIcon.tsx | 41 -- frontend/src/assets/Dashboard/List.tsx | 30 -- frontend/src/assets/Dashboard/Table.tsx | 48 -- frontend/src/assets/Dashboard/TimeSeries.tsx | 69 --- frontend/src/assets/Dashboard/Value.tsx | 32 -- frontend/src/assets/SomethingWentWrong.tsx | 468 ------------------ 10 files changed, 936 deletions(-) delete mode 100644 frontend/src/assets/CustomIcons/GrafanaIcon.tsx delete mode 100644 frontend/src/assets/CustomIcons/JuiceBoxIcon.tsx delete mode 100644 frontend/src/assets/CustomIcons/MagicBallIcon.tsx delete mode 100644 frontend/src/assets/CustomIcons/TentIcon.tsx delete mode 100644 frontend/src/assets/Dashboard/BarIcon.tsx delete mode 100644 frontend/src/assets/Dashboard/List.tsx delete mode 100644 frontend/src/assets/Dashboard/Table.tsx delete mode 100644 frontend/src/assets/Dashboard/TimeSeries.tsx delete mode 100644 frontend/src/assets/Dashboard/Value.tsx delete mode 100644 frontend/src/assets/SomethingWentWrong.tsx diff --git a/frontend/src/assets/CustomIcons/GrafanaIcon.tsx b/frontend/src/assets/CustomIcons/GrafanaIcon.tsx deleted file mode 100644 index c1949dbb95..0000000000 --- a/frontend/src/assets/CustomIcons/GrafanaIcon.tsx +++ /dev/null @@ -1,18 +0,0 @@ -function GrafanaIcon(): JSX.Element { - return ( - - - - ); -} - -export default GrafanaIcon; diff --git 
a/frontend/src/assets/CustomIcons/JuiceBoxIcon.tsx b/frontend/src/assets/CustomIcons/JuiceBoxIcon.tsx deleted file mode 100644 index 103a48b493..0000000000 --- a/frontend/src/assets/CustomIcons/JuiceBoxIcon.tsx +++ /dev/null @@ -1,82 +0,0 @@ -function JuiceBoxIcon(): JSX.Element { - return ( - - - - - - - - - - - - - - - - - - - - ); -} - -export default JuiceBoxIcon; diff --git a/frontend/src/assets/CustomIcons/MagicBallIcon.tsx b/frontend/src/assets/CustomIcons/MagicBallIcon.tsx deleted file mode 100644 index e37e4bb540..0000000000 --- a/frontend/src/assets/CustomIcons/MagicBallIcon.tsx +++ /dev/null @@ -1,38 +0,0 @@ -function MagicBallIcon(): JSX.Element { - return ( - - - - - - - - - ); -} - -export default MagicBallIcon; diff --git a/frontend/src/assets/CustomIcons/TentIcon.tsx b/frontend/src/assets/CustomIcons/TentIcon.tsx deleted file mode 100644 index 9271324fae..0000000000 --- a/frontend/src/assets/CustomIcons/TentIcon.tsx +++ /dev/null @@ -1,110 +0,0 @@ -function TentIcon(): JSX.Element { - return ( - - - - - - - - - - - - - - - - - - - - - - - - - - - ); -} - -export default TentIcon; diff --git a/frontend/src/assets/Dashboard/BarIcon.tsx b/frontend/src/assets/Dashboard/BarIcon.tsx deleted file mode 100644 index b8e6b3c52f..0000000000 --- a/frontend/src/assets/Dashboard/BarIcon.tsx +++ /dev/null @@ -1,41 +0,0 @@ -import { CSSProperties } from 'react'; - -function BarIcon({ - fillColor, -}: { - fillColor: CSSProperties['color']; -}): JSX.Element { - return ( - - - - - - ); -} - -export default BarIcon; diff --git a/frontend/src/assets/Dashboard/List.tsx b/frontend/src/assets/Dashboard/List.tsx deleted file mode 100644 index 1c4d1d04a9..0000000000 --- a/frontend/src/assets/Dashboard/List.tsx +++ /dev/null @@ -1,30 +0,0 @@ -import { CSSProperties } from 'react'; - -function ListIcon({ - fillColor, -}: { - fillColor: CSSProperties['color']; -}): JSX.Element { - return ( - - - - - - - - - ); -} - -export default ListIcon; diff --git 
a/frontend/src/assets/Dashboard/Table.tsx b/frontend/src/assets/Dashboard/Table.tsx deleted file mode 100644 index 60effdbfc0..0000000000 --- a/frontend/src/assets/Dashboard/Table.tsx +++ /dev/null @@ -1,48 +0,0 @@ -import { CSSProperties } from 'react'; - -function TableIcon({ - fillColor, -}: { - fillColor: CSSProperties['color']; -}): JSX.Element { - return ( - - - - - - - ); -} - -export default TableIcon; diff --git a/frontend/src/assets/Dashboard/TimeSeries.tsx b/frontend/src/assets/Dashboard/TimeSeries.tsx deleted file mode 100644 index afa9b5f095..0000000000 --- a/frontend/src/assets/Dashboard/TimeSeries.tsx +++ /dev/null @@ -1,69 +0,0 @@ -import { CSSProperties } from 'react'; - -function TimeSeries({ - fillColor, -}: { - fillColor: CSSProperties['color']; -}): JSX.Element { - return ( - - - - - - - - - - ); -} - -export default TimeSeries; diff --git a/frontend/src/assets/Dashboard/Value.tsx b/frontend/src/assets/Dashboard/Value.tsx deleted file mode 100644 index 39ef8d9f44..0000000000 --- a/frontend/src/assets/Dashboard/Value.tsx +++ /dev/null @@ -1,32 +0,0 @@ -import { CSSProperties } from 'react'; - -function Value({ - fillColor, -}: { - fillColor: CSSProperties['color']; -}): JSX.Element { - return ( - - - - - ); -} - -export default Value; diff --git a/frontend/src/assets/SomethingWentWrong.tsx b/frontend/src/assets/SomethingWentWrong.tsx deleted file mode 100644 index e6b0d30a3a..0000000000 --- a/frontend/src/assets/SomethingWentWrong.tsx +++ /dev/null @@ -1,468 +0,0 @@ -function SomethingWentWrong(): JSX.Element { - return ( - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ); -} - -export default SomethingWentWrong; From e78dfc162263d429e0bf852cb19fe22373d87e1f Mon Sep 17 00:00:00 2001 From: Yunus M Date: Wed, 29 Apr 2026 09:28:38 +0530 Subject: [PATCH 16/19] Azure service 
integration UI (#11117) * feat: azure integration - ui refactor * feat: implement AWS cloud account integration UI components and connection handling * feat: add Azure cloud account integration UI components and connection handling * feat: enhance Azure cloud account setup UI with prerequisites and accordion for how it works section * feat: enhance styling for Azure and AWS account management * refactor: clean up state initialization and destructuring in AWS and HeroSection components * fix: update import path for ServiceDashboards in S3Sync test * feat: add Denmark East region to Azure regions and enhance Azure account removal messaging * chore: remove prefer-signoz-ui-icons ESLint rule and update telemetry event naming --- .../CodeBlock/CodeBlock.module.scss | 22 ++ .../components/CodeBlock/CodeBlock.test.tsx | 46 +++ .../src/components/CodeBlock/CodeBlock.tsx | 89 +++++ .../components => }/AlertMessage.tsx | 23 +- .../AccountActions.style.scss | 6 + .../AccountActions.tsx | 133 +++++-- .../CloudAccountSetupModal.style.scss | 346 +++++++++++++++++ .../CloudAccountSetupModal.tsx | 6 +- .../AccountSettingsModal.style.scss | 0 .../AccountSettingsModal.tsx | 15 +- .../HeroSection/HeroSection.tsx | 28 -- .../CloudAccountSetupModal.style.scss | 180 --------- .../IntegrateNowFormSections.tsx | 2 +- .../components => RegionForm}/RegionForm.tsx | 15 +- .../RegionSelector.style.scss | 0 .../RegionSelector.tsx | 0 .../RenderConnectionParams.tsx | 0 .../S3BucketsSelector/S3BucketsSelector.tsx | 2 +- .../ServiceDetails/ServiceDetails.tsx | 176 ++++++--- .../AmazonWebServices/ServicesTabs.tsx | 29 -- .../__tests__/ServiceDetailsS3Sync.test.tsx | 2 +- .../AmazonWebServices/__tests__/utils.tsx | 4 +- .../mapAwsCloudAccountFromDto.ts | 25 -- .../AddNewAccount/CloudAccountSetupModal.tsx | 356 ++++++++++++++++++ .../EditAccount/AccountSettingsModal.tsx | 150 ++++++++ .../CloudIntegration/CloudIntegration.tsx | 7 +- .../CloudIntegration/Header/Header.tsx | 39 +- 
.../RemoveIntegrationAccount.scss | 9 +- .../RemoveIntegrationAccount.tsx | 41 +- .../ServiceDashboards.styles.scss | 0 .../ServiceDashboards/ServiceDashboards.tsx | 0 .../{AmazonWebServices => }/ServiceItem.tsx | 2 +- .../ServicesTabs.style.scss | 0 .../ServiceTabs/ServicesTabs.tsx | 30 ++ .../{AmazonWebServices => }/ServicesList.tsx | 9 +- .../mapCloudAccountFromDto.ts | 49 +++ .../Integrations/CloudIntegration/utils.ts | 27 ++ .../HeroSection/HeroSection.style.scss | 0 .../Integrations/HeroSection/HeroSection.tsx | 33 ++ .../HeroSection/types.ts | 0 .../IntegrationDetailPage.styles.scss | 77 ++-- .../IntegrationDetailPage.tsx | 30 +- .../OneClickIntegrations.tsx | 1 + .../src/container/Integrations/constants.ts | 7 +- frontend/src/container/Integrations/types.ts | 5 +- .../Integrations/useGetIntegrationStatus.ts | 9 +- .../integration/aws/useIntegrationModal.ts | 4 +- .../azure/useAccountSettingsModal.ts | 142 +++++++ .../integration/azure/useIntegrationModal.ts | 188 +++++++++ 49 files changed, 1882 insertions(+), 482 deletions(-) create mode 100644 frontend/src/components/CodeBlock/CodeBlock.module.scss create mode 100644 frontend/src/components/CodeBlock/CodeBlock.test.tsx create mode 100644 frontend/src/components/CodeBlock/CodeBlock.tsx rename frontend/src/container/Integrations/CloudIntegration/{AmazonWebServices/HeroSection/components => }/AlertMessage.tsx (64%) rename frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/{HeroSection/components => AccountActions}/AccountActions.style.scss (97%) rename frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/{HeroSection/components => AccountActions}/AccountActions.tsx (60%) create mode 100644 frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/AddNewAccount/CloudAccountSetupModal.style.scss rename frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/{HeroSection/components => AddNewAccount}/CloudAccountSetupModal.tsx (97%) rename 
frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/{HeroSection/components => EditAccount}/AccountSettingsModal.style.scss (100%) rename frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/{HeroSection/components => EditAccount}/AccountSettingsModal.tsx (92%) delete mode 100644 frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/HeroSection/HeroSection.tsx delete mode 100644 frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/HeroSection/components/CloudAccountSetupModal.style.scss rename frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/{HeroSection/components => }/IntegrateNowFormSections.tsx (97%) rename frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/{HeroSection/components => RegionForm}/RegionForm.tsx (89%) rename frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/{HeroSection/components => RegionForm}/RegionSelector.style.scss (100%) rename frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/{HeroSection/components => RegionForm}/RegionSelector.tsx (100%) rename frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/{HeroSection/components => RegionForm}/RenderConnectionParams.tsx (100%) delete mode 100644 frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/ServicesTabs.tsx delete mode 100644 frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/mapAwsCloudAccountFromDto.ts create mode 100644 frontend/src/container/Integrations/CloudIntegration/AzureCloudServices/AddNewAccount/CloudAccountSetupModal.tsx create mode 100644 frontend/src/container/Integrations/CloudIntegration/AzureCloudServices/EditAccount/AccountSettingsModal.tsx rename frontend/src/container/Integrations/CloudIntegration/{AmazonWebServices/HeroSection/components => RemoveAccount}/RemoveIntegrationAccount.scss (80%) rename 
frontend/src/container/Integrations/CloudIntegration/{AmazonWebServices/HeroSection/components => RemoveAccount}/RemoveIntegrationAccount.tsx (61%) rename frontend/src/container/Integrations/CloudIntegration/{AmazonWebServices => }/ServiceDashboards/ServiceDashboards.styles.scss (100%) rename frontend/src/container/Integrations/CloudIntegration/{AmazonWebServices => }/ServiceDashboards/ServiceDashboards.tsx (100%) rename frontend/src/container/Integrations/CloudIntegration/{AmazonWebServices => }/ServiceItem.tsx (93%) rename frontend/src/container/Integrations/CloudIntegration/{AmazonWebServices => ServiceTabs}/ServicesTabs.style.scss (100%) create mode 100644 frontend/src/container/Integrations/CloudIntegration/ServiceTabs/ServicesTabs.tsx rename frontend/src/container/Integrations/CloudIntegration/{AmazonWebServices => }/ServicesList.tsx (95%) create mode 100644 frontend/src/container/Integrations/CloudIntegration/mapCloudAccountFromDto.ts rename frontend/src/container/Integrations/{CloudIntegration/AmazonWebServices => }/HeroSection/HeroSection.style.scss (100%) create mode 100644 frontend/src/container/Integrations/HeroSection/HeroSection.tsx rename frontend/src/container/Integrations/{CloudIntegration/AmazonWebServices => }/HeroSection/types.ts (100%) create mode 100644 frontend/src/hooks/integration/azure/useAccountSettingsModal.ts create mode 100644 frontend/src/hooks/integration/azure/useIntegrationModal.ts diff --git a/frontend/src/components/CodeBlock/CodeBlock.module.scss b/frontend/src/components/CodeBlock/CodeBlock.module.scss new file mode 100644 index 0000000000..d871b29e87 --- /dev/null +++ b/frontend/src/components/CodeBlock/CodeBlock.module.scss @@ -0,0 +1,22 @@ +.codeBlock { + position: relative; +} + +.codeBlockSyntaxHighlighter { + background-color: var(--l2-background) !important; + border-radius: 4px !important; + border: 1px solid var(--l2-border) !important; + color: var(--l2-foreground) !important; + + pre { + color: var(--l2-foreground) 
!important; + font-family: 'Geist Mono' !important; + font-size: 12px !important; + } + + code { + color: var(--l1-foreground) !important; + font-family: 'Geist Mono' !important; + font-size: 12px !important; + } +} diff --git a/frontend/src/components/CodeBlock/CodeBlock.test.tsx b/frontend/src/components/CodeBlock/CodeBlock.test.tsx new file mode 100644 index 0000000000..ee161f9f2d --- /dev/null +++ b/frontend/src/components/CodeBlock/CodeBlock.test.tsx @@ -0,0 +1,46 @@ +import { fireEvent, render, screen, waitFor } from '@testing-library/react'; + +import CodeBlock from './CodeBlock'; + +const mockCopyToClipboard = jest.fn(); + +jest.mock('react-use', () => ({ + useCopyToClipboard: (): [unknown, (text: string) => void] => [ + undefined, + mockCopyToClipboard, + ], +})); + +describe('CodeBlock', () => { + beforeEach(() => { + mockCopyToClipboard.mockReset(); + }); + + it('renders code block mode by default', () => { + render(); + + const container = screen.getByTestId('code-block-container'); + expect(container).toBeInTheDocument(); + expect(container).toHaveTextContent('const x = 1;'); + }); + + it('renders inline code when inline is true', () => { + render(); + + const inlineCode = screen.getByText('inline value'); + expect(inlineCode.tagName.toLowerCase()).toBe('code'); + expect(screen.queryByTestId('code-block-container')).not.toBeInTheDocument(); + }); + + it('copies code and triggers callback', async () => { + const onCopy = jest.fn(); + render(); + + fireEvent.click(screen.getByRole('button', { name: /copy code/i })); + + await waitFor(() => { + expect(mockCopyToClipboard).toHaveBeenCalledWith('SELECT * FROM logs;'); + }); + expect(onCopy).toHaveBeenCalledWith('SELECT * FROM logs;'); + }); +}); diff --git a/frontend/src/components/CodeBlock/CodeBlock.tsx b/frontend/src/components/CodeBlock/CodeBlock.tsx new file mode 100644 index 0000000000..d150b8fb61 --- /dev/null +++ b/frontend/src/components/CodeBlock/CodeBlock.tsx @@ -0,0 +1,89 @@ +import { useMemo, 
useState } from 'react'; +import { useCopyToClipboard } from 'react-use'; +import { Check, Copy } from '@signozhq/icons'; +import { Button } from '@signozhq/ui'; +import SyntaxHighlighter, { + a11yDark, +} from 'components/MarkdownRenderer/syntaxHighlighter'; + +import styles from './CodeBlock.module.scss'; + +export interface CodeBlockProps { + code: string; + language?: string; + className?: string; + inline?: boolean; + showLineNumbers?: boolean; + showCopyButton?: boolean; + onCopy?: (copiedCode: string) => void; +} + +function CodeBlock({ + code, + language = 'text', + className, + inline = false, + showLineNumbers = false, + showCopyButton = true, + onCopy, +}: CodeBlockProps): JSX.Element { + const [isCopied, setIsCopied] = useState(false); + const [, copyToClipboard] = useCopyToClipboard(); + const normalizedCode = useMemo(() => code?.replace(/\n$/, '') ?? '', [code]); + + const handleCopy = (): void => { + copyToClipboard(normalizedCode); + setIsCopied(true); + onCopy?.(normalizedCode); + + setTimeout(() => { + setIsCopied(false); + }, 1000); + }; + + if (inline) { + return {normalizedCode}; + } + + return ( +
+ {showCopyButton ? ( +
+ ); +} + +CodeBlock.defaultProps = { + language: 'text', + className: undefined, + inline: false, + showLineNumbers: false, + showCopyButton: true, + onCopy: undefined, +}; + +export default CodeBlock; diff --git a/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/HeroSection/components/AlertMessage.tsx b/frontend/src/container/Integrations/CloudIntegration/AlertMessage.tsx similarity index 64% rename from frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/HeroSection/components/AlertMessage.tsx rename to frontend/src/container/Integrations/CloudIntegration/AlertMessage.tsx index c5f99298a2..7c768e7661 100644 --- a/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/HeroSection/components/AlertMessage.tsx +++ b/frontend/src/container/Integrations/CloudIntegration/AlertMessage.tsx @@ -1,8 +1,8 @@ -import { Color } from '@signozhq/design-tokens'; -import { Alert, Spin } from 'antd'; -import { LoaderCircle, TriangleAlert } from 'lucide-react'; +import { Callout } from '@signozhq/ui'; +import { Spin } from 'antd'; +import { LoaderCircle } from 'lucide-react'; -import { ModalStateEnum } from '../types'; +import { ModalStateEnum } from '../HeroSection/types'; function AlertMessage({ modalState, @@ -12,14 +12,13 @@ function AlertMessage({ switch (modalState) { case ModalStateEnum.WAITING: return ( - } @@ -28,21 +27,19 @@ function AlertMessage({ 10 secs...
} - className="cloud-account-setup-form__alert" - type="warning" + type="info" + showIcon={false} /> ); case ModalStateEnum.ERROR: return ( - - {`We couldn't establish a connection to your AWS account. Please try again`}
} type="error" - className="cloud-account-setup-form__alert" /> ); default: diff --git a/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/HeroSection/components/AccountActions.style.scss b/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/AccountActions/AccountActions.style.scss similarity index 97% rename from frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/HeroSection/components/AccountActions.style.scss rename to frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/AccountActions/AccountActions.style.scss index edc1cf068b..823da8d4b4 100644 --- a/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/HeroSection/components/AccountActions.style.scss +++ b/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/AccountActions/AccountActions.style.scss @@ -117,6 +117,12 @@ min-width: 140px !important; } + &.azure { + .ant-select-selector { + min-width: 282px !important; + } + } + .ant-select-item-option-active { background: var(--l3-background) !important; } diff --git a/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/HeroSection/components/AccountActions.tsx b/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/AccountActions/AccountActions.tsx similarity index 60% rename from frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/HeroSection/components/AccountActions.tsx rename to frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/AccountActions/AccountActions.tsx index 50b75de260..333b3187bd 100644 --- a/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/HeroSection/components/AccountActions.tsx +++ b/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/AccountActions/AccountActions.tsx @@ -1,4 +1,4 @@ -import { useEffect, useMemo, useState } from 'react'; +import { Dispatch, SetStateAction, useEffect, useMemo, useState } from 'react'; 
import { useNavigate } from 'react-router-dom-v5-compat'; import { Color } from '@signozhq/design-tokens'; import { Button } from '@signozhq/ui'; @@ -6,19 +6,29 @@ import { Select, Skeleton } from 'antd'; import { SelectProps } from 'antd/lib'; import logEvent from 'api/common/logEvent'; import { useListAccounts } from 'api/generated/services/cloudintegration'; +import cx from 'classnames'; import { getAccountById } from 'container/Integrations/CloudIntegration/utils'; -import { INTEGRATION_TYPES } from 'container/Integrations/constants'; +import { + CloudAccount as IntegrationCloudAccount, + IntegrationType, +} from 'container/Integrations/types'; import useUrlQuery from 'hooks/useUrlQuery'; import { ChevronDown, Dot, PencilLine, Plug, Plus } from 'lucide-react'; -import { mapAccountDtoToAwsCloudAccount } from '../../mapAwsCloudAccountFromDto'; -import { CloudAccount } from '../../types'; -import AccountSettingsModal from './AccountSettingsModal'; -import CloudAccountSetupModal from './CloudAccountSetupModal'; +import AzureCloudAccountSetupModal from '../../AzureCloudServices/AddNewAccount/CloudAccountSetupModal'; +import AzureAccountSettingsModal from '../../AzureCloudServices/EditAccount/AccountSettingsModal'; +import { + mapAccountDtoToAwsCloudAccount, + mapAccountDtoToAzureCloudAccount, +} from '../../mapCloudAccountFromDto'; +import AwsCloudAccountSetupModal from '../AddNewAccount/CloudAccountSetupModal'; +import AwsAccountSettingsModal from '../EditAccount/AccountSettingsModal'; +import { CloudAccount as AwsCloudAccount } from '../types'; import './AccountActions.style.scss'; function AccountActionsRenderer({ + type, accounts, isLoading, activeAccount, @@ -27,9 +37,10 @@ function AccountActionsRenderer({ onIntegrationModalOpen, onAccountSettingsModalOpen, }: { - accounts: CloudAccount[] | undefined; + type: IntegrationType; + accounts: IntegrationCloudAccount[] | undefined; isLoading: boolean; - activeAccount: CloudAccount | null; + activeAccount: 
IntegrationCloudAccount | null; selectOptions: SelectProps['options']; onAccountChange: (value: string) => void; onIntegrationModalOpen: () => void; @@ -57,9 +68,11 @@ function AccountActionsRenderer({ ({ + label: `${region.label} (${region.value})`, + value: region.value, + }))} + getPopupContainer={popupContainer} + disabled={modalState === ModalStateEnum.WAITING} + /> + +
+ +
+
+ Which resource groups do you want to monitor? +
+
+ Add one or more Azure resource group names. +
+ + { + setResourceGroups(values); + form.setFieldValue('resourceGroups', values); + }} + /> + +
+
+ + + ); +} + +export default AccountSettingsModal; diff --git a/frontend/src/container/Integrations/CloudIntegration/CloudIntegration.tsx b/frontend/src/container/Integrations/CloudIntegration/CloudIntegration.tsx index d145ea4406..29677da3cc 100644 --- a/frontend/src/container/Integrations/CloudIntegration/CloudIntegration.tsx +++ b/frontend/src/container/Integrations/CloudIntegration/CloudIntegration.tsx @@ -1,16 +1,15 @@ import { IntegrationType } from 'container/Integrations/types'; -import AWSTabs from './AmazonWebServices/ServicesTabs'; import Header from './Header/Header'; +import ServicesTabs from './ServiceTabs/ServicesTabs'; import './CloudIntegration.styles.scss'; const CloudIntegration = ({ type }: { type: IntegrationType }): JSX.Element => { return (
-
- - {type === IntegrationType.AWS_SERVICES && } +
+
); }; diff --git a/frontend/src/container/Integrations/CloudIntegration/Header/Header.tsx b/frontend/src/container/Integrations/CloudIntegration/Header/Header.tsx index 4d396ce2c4..4c92391718 100644 --- a/frontend/src/container/Integrations/CloudIntegration/Header/Header.tsx +++ b/frontend/src/container/Integrations/CloudIntegration/Header/Header.tsx @@ -7,7 +7,7 @@ import { Blocks, LifeBuoy } from 'lucide-react'; import './Header.styles.scss'; -function Header({ title }: { title: IntegrationType }): JSX.Element { +function Header({ type }: { type: IntegrationType }): JSX.Element { return (
@@ -25,27 +25,30 @@ function Header({ title }: { title: IntegrationType }): JSX.Element { ), }, { - title:
{title}
, + title:
{type}
, }, ]} />
-
- -
+ + {type === IntegrationType.AWS_SERVICES && ( +
+ +
+ )}
); } diff --git a/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/HeroSection/components/RemoveIntegrationAccount.scss b/frontend/src/container/Integrations/CloudIntegration/RemoveAccount/RemoveIntegrationAccount.scss similarity index 80% rename from frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/HeroSection/components/RemoveIntegrationAccount.scss rename to frontend/src/container/Integrations/CloudIntegration/RemoveAccount/RemoveIntegrationAccount.scss index bea414d66c..f9e64f6eab 100644 --- a/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/HeroSection/components/RemoveIntegrationAccount.scss +++ b/frontend/src/container/Integrations/CloudIntegration/RemoveAccount/RemoveIntegrationAccount.scss @@ -1,9 +1,16 @@ .remove-integration-account-modal { + &__cloud-provider { + color: var(--l1-foreground); + font-weight: 500; + font-size: 14px; + line-height: 20px; + letter-spacing: -0.07px; + } + .ant-modal-content { background-color: var(--l1-background); border: 1px solid var(--l3-background); border-radius: 4px; - padding: 12px; } .ant-modal-close { diff --git a/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/HeroSection/components/RemoveIntegrationAccount.tsx b/frontend/src/container/Integrations/CloudIntegration/RemoveAccount/RemoveIntegrationAccount.tsx similarity index 61% rename from frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/HeroSection/components/RemoveIntegrationAccount.tsx rename to frontend/src/container/Integrations/CloudIntegration/RemoveAccount/RemoveIntegrationAccount.tsx index 1ee027f3b7..1bce55ebaf 100644 --- a/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/HeroSection/components/RemoveIntegrationAccount.tsx +++ b/frontend/src/container/Integrations/CloudIntegration/RemoveAccount/RemoveIntegrationAccount.tsx @@ -4,16 +4,21 @@ import { Modal } from 'antd/lib'; import logEvent from 'api/common/logEvent'; import { 
useDisconnectAccount } from 'api/generated/services/cloudintegration'; import { SOMETHING_WENT_WRONG } from 'constants/api'; -import { INTEGRATION_TELEMETRY_EVENTS } from 'container/Integrations/constants'; +import { + INTEGRATION_TELEMETRY_EVENTS, + INTEGRATION_TYPES, +} from 'container/Integrations/constants'; import { useNotifications } from 'hooks/useNotifications'; import { Unlink } from 'lucide-react'; import './RemoveIntegrationAccount.scss'; function RemoveIntegrationAccount({ + cloudProvider, accountId, onRemoveIntegrationAccountSuccess, }: { + cloudProvider: string; accountId: string; onRemoveIntegrationAccountSuccess: () => void; }): JSX.Element { @@ -39,12 +44,13 @@ function RemoveIntegrationAccount({ }, }); const handleOk = (): void => { - logEvent(INTEGRATION_TELEMETRY_EVENTS.AWS_INTEGRATION_ACCOUNT_REMOVED, { + logEvent(INTEGRATION_TELEMETRY_EVENTS.INTEGRATION_ACCOUNT_REMOVED, { accountId, + integration: cloudProvider, }); disconnectAccount({ pathParams: { - cloudProvider: 'aws', + cloudProvider, id: accountId, }, }); @@ -78,13 +84,28 @@ function RemoveIntegrationAccount({ loading: isRemoveIntegrationLoading, }} > - Removing this account will remove all components created for sending - telemetry to SigNoz in your AWS account within the next ~15 minutes - (cloudformation stacks named signoz-integration-telemetry-collection in - enabled regions).
-
- After that, you can delete the cloudformation stack that was created - manually when connecting this account. + {cloudProvider === INTEGRATION_TYPES.AWS ? ( + <> + Removing this account will remove all components created for sending + telemetry to SigNoz in your AWS account within the next ~15 minutes + (cloudformation stacks named signoz-integration-telemetry-collection in + enabled regions).
+
+ After that, you can delete the cloudformation stack that was created + manually when connecting this account. + + ) : ( + <> + Removing this account will remove all components created for sending + telemetry to SigNoz in your Azure subscription within the next ~15 minutes + (deployment stack named signoz-integration-telemetry will be deleted + automatically).
+
+ After that, you have to manually delete 'signoz-integration' + deployment stack that was created while connecting this account (Takes ~20 + minutes to delete). + + )}
); diff --git a/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/ServiceDashboards/ServiceDashboards.styles.scss b/frontend/src/container/Integrations/CloudIntegration/ServiceDashboards/ServiceDashboards.styles.scss similarity index 100% rename from frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/ServiceDashboards/ServiceDashboards.styles.scss rename to frontend/src/container/Integrations/CloudIntegration/ServiceDashboards/ServiceDashboards.styles.scss diff --git a/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/ServiceDashboards/ServiceDashboards.tsx b/frontend/src/container/Integrations/CloudIntegration/ServiceDashboards/ServiceDashboards.tsx similarity index 100% rename from frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/ServiceDashboards/ServiceDashboards.tsx rename to frontend/src/container/Integrations/CloudIntegration/ServiceDashboards/ServiceDashboards.tsx diff --git a/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/ServiceItem.tsx b/frontend/src/container/Integrations/CloudIntegration/ServiceItem.tsx similarity index 93% rename from frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/ServiceItem.tsx rename to frontend/src/container/Integrations/CloudIntegration/ServiceItem.tsx index 88b512decc..aa834b823f 100644 --- a/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/ServiceItem.tsx +++ b/frontend/src/container/Integrations/CloudIntegration/ServiceItem.tsx @@ -1,7 +1,7 @@ import cx from 'classnames'; import LineClampedText from 'periscope/components/LineClampedText/LineClampedText'; -import { Service } from './types'; +import { Service } from './AmazonWebServices/types'; function ServiceItem({ service, diff --git a/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/ServicesTabs.style.scss b/frontend/src/container/Integrations/CloudIntegration/ServiceTabs/ServicesTabs.style.scss 
similarity index 100% rename from frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/ServicesTabs.style.scss rename to frontend/src/container/Integrations/CloudIntegration/ServiceTabs/ServicesTabs.style.scss diff --git a/frontend/src/container/Integrations/CloudIntegration/ServiceTabs/ServicesTabs.tsx b/frontend/src/container/Integrations/CloudIntegration/ServiceTabs/ServicesTabs.tsx new file mode 100644 index 0000000000..dbdb2b4101 --- /dev/null +++ b/frontend/src/container/Integrations/CloudIntegration/ServiceTabs/ServicesTabs.tsx @@ -0,0 +1,30 @@ +import { IntegrationType } from 'container/Integrations/types'; +import useUrlQuery from 'hooks/useUrlQuery'; + +import HeroSection from '../../HeroSection/HeroSection'; +import ServiceDetails from '../AmazonWebServices/ServiceDetails/ServiceDetails'; +import ServicesList from '../ServicesList'; + +import './ServicesTabs.style.scss'; + +function ServicesTabs({ type }: { type: IntegrationType }): JSX.Element { + const urlQuery = useUrlQuery(); + const cloudAccountId = urlQuery.get('cloudAccountId') || ''; + + return ( +
+ + +
+
+ +
+
+ +
+
+
+ ); +} + +export default ServicesTabs; diff --git a/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/ServicesList.tsx b/frontend/src/container/Integrations/CloudIntegration/ServicesList.tsx similarity index 95% rename from frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/ServicesList.tsx rename to frontend/src/container/Integrations/CloudIntegration/ServicesList.tsx index 936db77131..9de64b0001 100644 --- a/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/ServicesList.tsx +++ b/frontend/src/container/Integrations/CloudIntegration/ServicesList.tsx @@ -4,15 +4,20 @@ import { Skeleton } from 'antd'; import { useListServicesMetadata } from 'api/generated/services/cloudintegration'; import type { CloudintegrationtypesServiceMetadataDTO } from 'api/generated/services/sigNoz.schemas'; import cx from 'classnames'; +import { IntegrationType } from 'container/Integrations/types'; import useUrlQuery from 'hooks/useUrlQuery'; import emptyStateIconUrl from '@/assets/Icons/emptyState.svg'; interface ServicesListProps { cloudAccountId: string; + type: IntegrationType; } -function ServicesList({ cloudAccountId }: ServicesListProps): JSX.Element { +function ServicesList({ + cloudAccountId, + type, +}: ServicesListProps): JSX.Element { const urlQuery = useUrlQuery(); const navigate = useNavigate(); const hasValidCloudAccountId = Boolean(cloudAccountId); @@ -22,7 +27,7 @@ function ServicesList({ cloudAccountId }: ServicesListProps): JSX.Element { const { data: servicesMetadata, isLoading } = useListServicesMetadata( { - cloudProvider: 'aws', + cloudProvider: type, }, serviceQueryParams, ); diff --git a/frontend/src/container/Integrations/CloudIntegration/mapCloudAccountFromDto.ts b/frontend/src/container/Integrations/CloudIntegration/mapCloudAccountFromDto.ts new file mode 100644 index 0000000000..cbd84f6338 --- /dev/null +++ b/frontend/src/container/Integrations/CloudIntegration/mapCloudAccountFromDto.ts @@ -0,0 
+1,49 @@ +import { CloudintegrationtypesAccountDTO } from 'api/generated/services/sigNoz.schemas'; +import { CloudAccount as IntegrationCloudAccount } from 'container/Integrations/types'; + +import { CloudAccount as AwsCloudAccount } from './AmazonWebServices/types'; + +export function mapAccountDtoToAwsCloudAccount( + account: CloudintegrationtypesAccountDTO, +): AwsCloudAccount | null { + if (!account.providerAccountId) { + return null; + } + + return { + id: account.id, + cloud_account_id: account.id, + config: { + regions: account.config?.aws?.regions ?? [], + }, + status: { + integration: { + last_heartbeat_ts_ms: account.agentReport?.timestampMillis ?? 0, + }, + }, + providerAccountId: account.providerAccountId, + }; +} + +export function mapAccountDtoToAzureCloudAccount( + account: CloudintegrationtypesAccountDTO, +): IntegrationCloudAccount | null { + if (!account.providerAccountId) { + return null; + } + + return { + id: account.id, + cloud_account_id: account.id, + config: { + deployment_region: account.config?.azure?.deploymentRegion ?? '', + resource_groups: account.config?.azure?.resourceGroups ?? [], + }, + status: { + integration: { + last_heartbeat_ts_ms: account.agentReport?.timestampMillis ?? 
0, + }, + }, + providerAccountId: account.providerAccountId, + }; +} diff --git a/frontend/src/container/Integrations/CloudIntegration/utils.ts b/frontend/src/container/Integrations/CloudIntegration/utils.ts index 2d506b9628..e8b9317fc1 100644 --- a/frontend/src/container/Integrations/CloudIntegration/utils.ts +++ b/frontend/src/container/Integrations/CloudIntegration/utils.ts @@ -1,5 +1,32 @@ +import { ONE_CLICK_INTEGRATIONS } from '../constants'; +import { IntegrationType } from '../types'; + export const getAccountById = ( accounts: T[], accountId: string, ): T | null => accounts.find((account) => account.cloud_account_id === accountId) || null; + +interface IntegrationMetadata { + title: string; + description: string; + logo: string; +} + +export const getIntegrationMetadata = ( + type: IntegrationType, +): IntegrationMetadata => { + const integration = ONE_CLICK_INTEGRATIONS.find( + (integration) => integration.id === type, + ); + + if (!integration) { + return { title: '', description: '', logo: '' }; + } + + return { + title: integration.title, + description: integration.description, + logo: integration.icon, + }; +}; diff --git a/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/HeroSection/HeroSection.style.scss b/frontend/src/container/Integrations/HeroSection/HeroSection.style.scss similarity index 100% rename from frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/HeroSection/HeroSection.style.scss rename to frontend/src/container/Integrations/HeroSection/HeroSection.style.scss diff --git a/frontend/src/container/Integrations/HeroSection/HeroSection.tsx b/frontend/src/container/Integrations/HeroSection/HeroSection.tsx new file mode 100644 index 0000000000..4c524a16bc --- /dev/null +++ b/frontend/src/container/Integrations/HeroSection/HeroSection.tsx @@ -0,0 +1,33 @@ +import { IntegrationType } from 'container/Integrations/types'; + +import AccountActions from 
'../CloudIntegration/AmazonWebServices/AccountActions/AccountActions'; +import { getIntegrationMetadata } from '../CloudIntegration/utils'; + +import './HeroSection.style.scss'; + +function HeroSection({ type }: { type: IntegrationType }): JSX.Element { + const { + title, + description, + logo: integrationLogo, + } = getIntegrationMetadata(type); + + return ( +
+
+
+
+ {type} +
+ +
{title}
+
+
{description}
+
+ + +
+ ); +} + +export default HeroSection; diff --git a/frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/HeroSection/types.ts b/frontend/src/container/Integrations/HeroSection/types.ts similarity index 100% rename from frontend/src/container/Integrations/CloudIntegration/AmazonWebServices/HeroSection/types.ts rename to frontend/src/container/Integrations/HeroSection/types.ts diff --git a/frontend/src/container/Integrations/IntegrationDetailPage/IntegrationDetailPage.styles.scss b/frontend/src/container/Integrations/IntegrationDetailPage/IntegrationDetailPage.styles.scss index 6a0ecc1a16..7cff9de354 100644 --- a/frontend/src/container/Integrations/IntegrationDetailPage/IntegrationDetailPage.styles.scss +++ b/frontend/src/container/Integrations/IntegrationDetailPage/IntegrationDetailPage.styles.scss @@ -9,53 +9,6 @@ flex-direction: column; gap: 16px; - .error-container { - display: flex; - border-radius: 6px; - border: 1px solid var(--l1-border); - background: var(--l1-background); - align-items: center; - justify-content: center; - flex-direction: column; - - .error-content { - display: flex; - flex-direction: column; - justify-content: center; - height: 300px; - gap: 15px; - - .error-btns { - display: flex; - flex-direction: row; - gap: 16px; - align-items: center; - - .retry-btn { - display: flex; - align-items: center; - } - - .contact-support { - display: flex; - align-items: center; - gap: 4px; - cursor: pointer; - - .text { - color: var(--callout-primary-description); - font-weight: 500; - } - } - } - - .error-state-svg { - height: 40px; - width: 40px; - } - } - } - .loading-integration-details { display: flex; flex-direction: column; @@ -327,6 +280,36 @@ } } } + + .error-container { + display: flex; + border-radius: 6px; + border: 1px solid var(--l1-border); + background: var(--l1-background); + align-items: center; + justify-content: center; + flex-direction: column; + + .error-content { + display: flex; + flex-direction: column; + 
justify-content: center; + height: 300px; + gap: 15px; + + .error-btns { + display: flex; + flex-direction: row; + gap: 12px; + align-items: center; + } + + .error-state-svg { + height: 40px; + width: 40px; + } + } + } } .remove-integration-modal { diff --git a/frontend/src/container/Integrations/IntegrationDetailPage/IntegrationDetailPage.tsx b/frontend/src/container/Integrations/IntegrationDetailPage/IntegrationDetailPage.tsx index 4dfb9a815b..d43d2cc464 100644 --- a/frontend/src/container/Integrations/IntegrationDetailPage/IntegrationDetailPage.tsx +++ b/frontend/src/container/Integrations/IntegrationDetailPage/IntegrationDetailPage.tsx @@ -1,6 +1,5 @@ import { useState } from 'react'; import { useHistory, useParams } from 'react-router-dom'; -import { Color } from '@signozhq/design-tokens'; import { Button } from '@signozhq/ui'; import { Flex, Skeleton, Typography } from 'antd'; import ROUTES from 'constants/routes'; @@ -55,8 +54,19 @@ function IntegrationDetailPage(): JSX.Element { ), ); - if (integrationId === INTEGRATION_TYPES.AWS) { - return ; + if ( + integrationId === INTEGRATION_TYPES.AWS || + integrationId === INTEGRATION_TYPES.AZURE + ) { + return ( + + ); } return ( @@ -85,20 +95,20 @@ function IntegrationDetailPage(): JSX.Element {
-
handleContactSupport(isCloudUserVal)} + suffix={} > - Contact Support - - -
+ Contact Support +
diff --git a/frontend/src/container/Integrations/OneClickIntegrations/OneClickIntegrations.tsx b/frontend/src/container/Integrations/OneClickIntegrations/OneClickIntegrations.tsx index affb28d24f..62c11705f2 100644 --- a/frontend/src/container/Integrations/OneClickIntegrations/OneClickIntegrations.tsx +++ b/frontend/src/container/Integrations/OneClickIntegrations/OneClickIntegrations.tsx @@ -22,6 +22,7 @@ function OneClickIntegrations(props: OneClickIntegrationsProps): JSX.Element { if (!query) { return ONE_CLICK_INTEGRATIONS; } + return ONE_CLICK_INTEGRATIONS.filter( (integration) => integration.title.toLowerCase().includes(query) || diff --git a/frontend/src/container/Integrations/constants.ts b/frontend/src/container/Integrations/constants.ts index 9bb8155eda..8645bcf9a0 100644 --- a/frontend/src/container/Integrations/constants.ts +++ b/frontend/src/container/Integrations/constants.ts @@ -14,8 +14,8 @@ export const INTEGRATION_TELEMETRY_EVENTS = { 'Integrations Detail Page: Clicked remove Integration button for integration', INTEGRATIONS_DETAIL_CONFIGURE_INSTRUCTION: 'Integrations Detail Page: Navigated to configure an integration', - AWS_INTEGRATION_ACCOUNT_REMOVED: - 'AWS Integration Detail page: Clicked remove Integration button for integration', + INTEGRATION_ACCOUNT_REMOVED: + 'Integration Detail page: Clicked remove Integration button for integration', }; export const INTEGRATION_TYPES = { @@ -53,7 +53,7 @@ export const AZURE_INTEGRATION = { is_new: true, }; -export const ONE_CLICK_INTEGRATIONS = [AWS_INTEGRATION]; +export const ONE_CLICK_INTEGRATIONS = [AWS_INTEGRATION, AZURE_INTEGRATION]; export const AZURE_REGIONS: AzureRegion[] = [ { @@ -81,6 +81,7 @@ export const AZURE_REGIONS: AzureRegion[] = [ { label: 'Central India', value: 'centralindia', geography: 'India' }, { label: 'Central US', value: 'centralus', geography: 'United States' }, { label: 'Chile Central', value: 'chilecentral', geography: 'Chile' }, + { label: 'Denmark East', value: 
'denmarkeast', geography: 'Denmark' }, { label: 'East Asia', value: 'eastasia', geography: 'Asia Pacific' }, { label: 'East US', value: 'eastus', geography: 'United States' }, { label: 'East US 2', value: 'eastus2', geography: 'United States' }, diff --git a/frontend/src/container/Integrations/types.ts b/frontend/src/container/Integrations/types.ts index 793f3550fa..9e29a1a087 100644 --- a/frontend/src/container/Integrations/types.ts +++ b/frontend/src/container/Integrations/types.ts @@ -4,8 +4,8 @@ import { } from './CloudIntegration/AmazonWebServices/types'; export enum IntegrationType { - AWS_SERVICES = 'aws-services', - AZURE_SERVICES = 'azure-services', + AWS_SERVICES = 'aws', + AZURE_SERVICES = 'azure', } interface LogField { @@ -89,6 +89,7 @@ export interface CloudAccount { cloud_account_id: string; config: AzureCloudAccountConfig | AWSCloudAccountConfig; status: AccountStatus | IServiceStatus; + providerAccountId: string; } export interface AzureCloudAccountConfig { diff --git a/frontend/src/hooks/Integrations/useGetIntegrationStatus.ts b/frontend/src/hooks/Integrations/useGetIntegrationStatus.ts index 8e40c0f8f5..097b89c64f 100644 --- a/frontend/src/hooks/Integrations/useGetIntegrationStatus.ts +++ b/frontend/src/hooks/Integrations/useGetIntegrationStatus.ts @@ -7,6 +7,13 @@ import { GetIntegrationStatusProps, } from 'types/api/integrations/types'; +export function isOneClickIntegration(integrationId: string): boolean { + return ( + integrationId === INTEGRATION_TYPES.AWS || + integrationId === INTEGRATION_TYPES.AZURE + ); +} + export const useGetIntegrationStatus = ({ integrationId, }: GetIntegrationPayloadProps): UseQueryResult< @@ -20,5 +27,5 @@ export const useGetIntegrationStatus = ({ enabled: !!integrationId && integrationId !== '' && - integrationId !== INTEGRATION_TYPES.AWS, + !isOneClickIntegration(integrationId), }); diff --git a/frontend/src/hooks/integration/aws/useIntegrationModal.ts b/frontend/src/hooks/integration/aws/useIntegrationModal.ts 
index 72a400aaaf..1d0206e79a 100644 --- a/frontend/src/hooks/integration/aws/useIntegrationModal.ts +++ b/frontend/src/hooks/integration/aws/useIntegrationModal.ts @@ -20,11 +20,11 @@ import { CloudintegrationtypesCredentialsDTO, CloudintegrationtypesPostableAccountDTO, } from 'api/generated/services/sigNoz.schemas'; +import { INTEGRATION_TYPES } from 'container/Integrations/constants'; import { ActiveViewEnum, ModalStateEnum, -} from 'container/Integrations/CloudIntegration/AmazonWebServices/HeroSection/types'; -import { INTEGRATION_TYPES } from 'container/Integrations/constants'; +} from 'container/Integrations/HeroSection/types'; import useAxiosError from 'hooks/useAxiosError'; import { regions } from 'utils/regions'; diff --git a/frontend/src/hooks/integration/azure/useAccountSettingsModal.ts b/frontend/src/hooks/integration/azure/useAccountSettingsModal.ts new file mode 100644 index 0000000000..d5c1b9c014 --- /dev/null +++ b/frontend/src/hooks/integration/azure/useAccountSettingsModal.ts @@ -0,0 +1,142 @@ +import { + Dispatch, + SetStateAction, + useCallback, + useEffect, + useMemo, + useState, +} from 'react'; +import { toast } from '@signozhq/ui'; +import { Form } from 'antd'; +import { FormInstance } from 'antd/lib'; +import { useUpdateAccount } from 'api/generated/services/cloudintegration'; +import { INTEGRATION_TYPES } from 'container/Integrations/constants'; +import { CloudAccount } from 'container/Integrations/types'; +import { isEqual } from 'lodash-es'; + +import logEvent from '../../../api/common/logEvent'; + +interface UseAccountSettingsModalProps { + onClose: () => void; + account: CloudAccount; + setActiveAccount: Dispatch>; +} + +interface UseAccountSettingsModal { + form: FormInstance; + isLoading: boolean; + resourceGroups: string[]; + isSaveDisabled: boolean; + setResourceGroups: Dispatch>; + handleSubmit: () => Promise; + handleClose: () => void; +} + +export function useAccountSettingsModal({ + onClose, + account, + setActiveAccount, +}: 
UseAccountSettingsModalProps): UseAccountSettingsModal { + const [form] = Form.useForm(); + const { mutate: updateAccount, isLoading } = useUpdateAccount(); + const accountConfig = useMemo( + () => ('deployment_region' in account.config ? account.config : null), + [account.config], + ); + const [resourceGroups, setResourceGroups] = useState( + accountConfig?.resource_groups || [], + ); + + useEffect(() => { + if (!accountConfig) { + return; + } + + form.setFieldsValue({ + region: accountConfig.deployment_region, + resourceGroups: accountConfig.resource_groups, + }); + setResourceGroups(accountConfig.resource_groups); + }, [accountConfig, form]); + + const handleSubmit = useCallback(async (): Promise => { + try { + const values = await form.validateFields(); + + updateAccount( + { + pathParams: { + cloudProvider: INTEGRATION_TYPES.AZURE, + id: account?.id || '', + }, + data: { + config: { + azure: { + resourceGroups: values.resourceGroups || [], + }, + }, + }, + }, + { + onSuccess: () => { + const nextConfig = { + deployment_region: accountConfig?.deployment_region || '', + resource_groups: values.resourceGroups || [], + }; + + setActiveAccount({ + ...account, + config: nextConfig, + }); + onClose(); + + toast.success('Account settings updated successfully', { + position: 'bottom-right', + }); + + logEvent('Azure Integration: Account settings updated', { + cloudAccountId: account.cloud_account_id, + deploymentRegion: nextConfig.deployment_region, + resourceGroups: nextConfig.resource_groups, + }); + }, + onError: (error) => { + toast.error('Failed to update account settings', { + description: error?.message, + position: 'bottom-right', + }); + }, + }, + ); + } catch (error) { + console.error('Form submission failed:', error); + } + }, [form, updateAccount, account, setActiveAccount, onClose]); + + const isSaveDisabled = useMemo(() => { + if (!accountConfig) { + return true; + } + + const formResourceGroups = resourceGroups || []; + + return isEqual( + 
[...formResourceGroups].sort(), + [...accountConfig.resource_groups].sort(), + ); + }, [accountConfig, resourceGroups, form]); + + const handleClose = useCallback(() => { + onClose(); + }, [onClose]); + + return { + form, + isLoading, + resourceGroups, + isSaveDisabled, + setResourceGroups, + handleSubmit, + handleClose, + }; +} diff --git a/frontend/src/hooks/integration/azure/useIntegrationModal.ts b/frontend/src/hooks/integration/azure/useIntegrationModal.ts new file mode 100644 index 0000000000..64cfaac4b1 --- /dev/null +++ b/frontend/src/hooks/integration/azure/useIntegrationModal.ts @@ -0,0 +1,188 @@ +import { Dispatch, SetStateAction, useCallback, useState } from 'react'; +import { useQueryClient } from 'react-query'; +import { toast } from '@signozhq/ui'; +import { Form, FormInstance } from 'antd'; +import { + CreateAccountMutationResult, + GetConnectionCredentialsQueryResult, + invalidateListAccounts, + useCreateAccount, + useGetConnectionCredentials, +} from 'api/generated/services/cloudintegration'; +import { + CloudintegrationtypesCredentialsDTO, + CloudintegrationtypesPostableAccountDTO, +} from 'api/generated/services/sigNoz.schemas'; +import { INTEGRATION_TYPES } from 'container/Integrations/constants'; +import { ModalStateEnum } from 'container/Integrations/HeroSection/types'; +import useAxiosError from 'hooks/useAxiosError'; + +import logEvent from '../../../api/common/logEvent'; + +interface UseIntegrationModalProps { + onClose: () => void; +} + +interface UseAzureIntegrationModal { + form: FormInstance; + modalState: ModalStateEnum; + isLoading: boolean; + accountId?: string; + connectionCommands: { + cliCommand: string; + cloudPowerShellCommand: string; + } | null; + setModalState: Dispatch>; + handleSubmit: () => Promise; + handleClose: () => void; + connectionParams?: CloudintegrationtypesCredentialsDTO; + isConnectionParamsLoading: boolean; + handleConnectionSuccess: (payload: { + cloudAccountId: string; + status?: unknown; + }) => void; + 
handleConnectionTimeout: (payload: { id?: string }) => void; + handleConnectionError: () => void; +} + +export function useIntegrationModal({ + onClose, +}: UseIntegrationModalProps): UseAzureIntegrationModal { + const queryClient = useQueryClient(); + const [form] = Form.useForm(); + const [modalState, setModalState] = useState( + ModalStateEnum.FORM, + ); + const [isLoading, setIsLoading] = useState(false); + const [accountId, setAccountId] = useState(undefined); + const [connectionCommands, setConnectionCommands] = useState<{ + cliCommand: string; + cloudPowerShellCommand: string; + } | null>(null); + + const handleClose = useCallback((): void => { + setModalState(ModalStateEnum.FORM); + setConnectionCommands(null); + onClose(); + }, [onClose]); + + const handleConnectionSuccess = useCallback( + (payload: { cloudAccountId: string; status?: unknown }): void => { + logEvent('Azure Integration: Account connected', { + cloudAccountId: payload.cloudAccountId, + status: payload.status, + }); + toast.success('Azure account connected successfully', { + position: 'bottom-right', + }); + void invalidateListAccounts(queryClient, { + cloudProvider: INTEGRATION_TYPES.AZURE, + }); + handleClose(); + }, + [handleClose, queryClient], + ); + + const handleConnectionTimeout = useCallback( + (payload: { id?: string }): void => { + setModalState(ModalStateEnum.ERROR); + logEvent('Azure Integration: Account connection attempt timed out', { + id: payload.id, + }); + }, + [], + ); + + const handleConnectionError = useCallback((): void => { + setModalState(ModalStateEnum.ERROR); + }, []); + + const { mutate: createAccount } = useCreateAccount(); + const handleError = useAxiosError(); + + const { data: connectionParams, isLoading: isConnectionParamsLoading } = + useGetConnectionCredentials( + { + cloudProvider: INTEGRATION_TYPES.AZURE, + }, + { + query: { + onError: handleError, + }, + }, + ); + + const handleSubmit = useCallback(async (): Promise => { + try { + setIsLoading(true); + 
const values = await form.validateFields(); + + const payload: CloudintegrationtypesPostableAccountDTO = { + config: { + azure: { + deploymentRegion: values.region, + resourceGroups: values.resourceGroups || [], + }, + }, + credentials: { + ingestionUrl: connectionParams?.data?.ingestionUrl || values.ingestionUrl, + ingestionKey: connectionParams?.data?.ingestionKey || values.ingestionKey, + sigNozApiUrl: connectionParams?.data?.sigNozApiUrl || values.sigNozApiUrl, + sigNozApiKey: connectionParams?.data?.sigNozApiKey || values.sigNozApiKey, + }, + }; + + createAccount( + { + pathParams: { cloudProvider: INTEGRATION_TYPES.AZURE }, + data: payload, + }, + { + onSuccess: (response: CreateAccountMutationResult) => { + const nextAccountId = response.data.id; + const artifact = response.data.connectionArtifact.azure; + + logEvent('Azure Integration: Account connection commands generated', { + id: nextAccountId, + }); + + setConnectionCommands({ + cliCommand: artifact?.cliCommand || '', + cloudPowerShellCommand: artifact?.cloudPowerShellCommand || '', + }); + setModalState(ModalStateEnum.WAITING); + setAccountId(nextAccountId); + }, + onError: () => { + setModalState(ModalStateEnum.ERROR); + toast.error('Failed to create account connection', { + position: 'bottom-right', + }); + }, + }, + ); + } catch (error) { + console.error('Form submission failed:', error); + } finally { + setIsLoading(false); + } + }, [form, connectionParams, createAccount]); + + return { + form, + modalState, + isLoading, + accountId, + connectionCommands, + setModalState, + handleSubmit, + handleClose, + connectionParams: connectionParams?.data as + | CloudintegrationtypesCredentialsDTO + | undefined, + isConnectionParamsLoading, + handleConnectionSuccess, + handleConnectionTimeout, + handleConnectionError, + }; +} From 14a032119abf5e22261c6c8cad8948dcf1423876 Mon Sep 17 00:00:00 2001 From: swapnil-signoz Date: Wed, 29 Apr 2026 10:53:22 +0530 Subject: [PATCH 17/19] chore: bumping cloud integration 
agent version to v0.0.10 (#11135) * chore: bumping agent version to v0.0.10 * chore: depployment --- pkg/modules/cloudintegration/config.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/modules/cloudintegration/config.go b/pkg/modules/cloudintegration/config.go index 95bf1eaa5f..713b6f4210 100644 --- a/pkg/modules/cloudintegration/config.go +++ b/pkg/modules/cloudintegration/config.go @@ -22,7 +22,7 @@ func newConfig() factory.Config { Agent: AgentConfig{ // we will maintain the latest version of cloud integration agent from here, // till we automate it externally or figure out a way to validate it. - Version: "v0.0.9", + Version: "v0.0.10", }, } } From f4e5534e53e1171a8ec4a412247afe2343a97267 Mon Sep 17 00:00:00 2001 From: Abhi kumar Date: Wed, 29 Apr 2026 12:25:01 +0530 Subject: [PATCH 18/19] chore: updated drilldown popup ui to match tooltip (#11113) --- .../hooks/usePanelContextMenu.ts | 7 +- .../QueryTable/Drilldown/drilldownUtils.tsx | 6 ++ .../Drilldown/useAggregateDrilldown.tsx | 1 + .../Drilldown/useBaseAggregateOptions.tsx | 8 +- .../components/ContextMenu/styles.scss | 77 ++++--------------- 5 files changed, 33 insertions(+), 66 deletions(-) diff --git a/frontend/src/container/DashboardContainer/visualization/hooks/usePanelContextMenu.ts b/frontend/src/container/DashboardContainer/visualization/hooks/usePanelContextMenu.ts index aa8dbe9299..1c6d8d6670 100644 --- a/frontend/src/container/DashboardContainer/visualization/hooks/usePanelContextMenu.ts +++ b/frontend/src/container/DashboardContainer/visualization/hooks/usePanelContextMenu.ts @@ -104,7 +104,12 @@ export const usePanelContextMenu = ({ } if (data && data?.record?.queryName) { - onClick(data.coord, { ...data.record, label: data.label, timeRange }); + onClick(data.coord, { + ...data.record, + label: data.label, + seriesColor: data.seriesColor, + timeRange, + }); } }, [onClick, queryResponse], diff --git a/frontend/src/container/QueryTable/Drilldown/drilldownUtils.tsx 
b/frontend/src/container/QueryTable/Drilldown/drilldownUtils.tsx index 7e150ba318..60ad9b19f2 100644 --- a/frontend/src/container/QueryTable/Drilldown/drilldownUtils.tsx +++ b/frontend/src/container/QueryTable/Drilldown/drilldownUtils.tsx @@ -196,6 +196,7 @@ export const getUplotClickData = ({ coord: { x: number; y: number }; record: { queryName: string; filters: FilterData[] }; label: string | React.ReactNode; + seriesColor?: string; } | null => { if (!queryData?.queryName || !metric) { return null; @@ -208,6 +209,8 @@ export const getUplotClickData = ({ // Generate label from focusedSeries data let label: string | React.ReactNode = ''; + const seriesColor = focusedSeries?.color; + if (focusedSeries && focusedSeries.seriesName) { label = ( @@ -223,6 +226,7 @@ export const getUplotClickData = ({ }, record, label, + seriesColor, }; }; @@ -237,6 +241,7 @@ export const getPieChartClickData = ( queryName: string; filters: FilterData[]; label: string | React.ReactNode; + seriesColor?: string; } | null => { const { metric, queryName } = arc.data.record; if (!queryName || !metric) { @@ -248,6 +253,7 @@ export const getPieChartClickData = ( queryName, filters: getFiltersFromMetric(metric), // TODO: add where clause query as well. 
label, + seriesColor: arc.data.color, }; }; diff --git a/frontend/src/container/QueryTable/Drilldown/useAggregateDrilldown.tsx b/frontend/src/container/QueryTable/Drilldown/useAggregateDrilldown.tsx index f1720ae259..ab8acd8582 100644 --- a/frontend/src/container/QueryTable/Drilldown/useAggregateDrilldown.tsx +++ b/frontend/src/container/QueryTable/Drilldown/useAggregateDrilldown.tsx @@ -22,6 +22,7 @@ export interface AggregateData { endTime: number; }; label?: string | React.ReactNode; + seriesColor?: string; } const useAggregateDrilldown = ({ diff --git a/frontend/src/container/QueryTable/Drilldown/useBaseAggregateOptions.tsx b/frontend/src/container/QueryTable/Drilldown/useBaseAggregateOptions.tsx index a9e851eb4f..604e821c5b 100644 --- a/frontend/src/container/QueryTable/Drilldown/useBaseAggregateOptions.tsx +++ b/frontend/src/container/QueryTable/Drilldown/useBaseAggregateOptions.tsx @@ -228,7 +228,13 @@ const useBaseAggregateOptions = ({ return ( : icon} + icon={ + isLoading ? ( + + ) : ( + {icon} + ) + } onClick={(): void => onClick()} disabled={isLoading} > diff --git a/frontend/src/periscope/components/ContextMenu/styles.scss b/frontend/src/periscope/components/ContextMenu/styles.scss index 40a7567932..503930ff71 100644 --- a/frontend/src/periscope/components/ContextMenu/styles.scss +++ b/frontend/src/periscope/components/ContextMenu/styles.scss @@ -4,7 +4,7 @@ gap: 8px; padding: 8px; cursor: pointer; - color: var(--foreground); + color: var(--muted-foreground); font-family: Inter; font-size: var(--font-size-sm); font-weight: 600; @@ -20,13 +20,10 @@ overflow: hidden; text-overflow: ellipsis; - &:hover { - background-color: var(--l1-background); - } - + &:hover, &:focus { outline: none; - background-color: var(--l1-background); + background-color: var(--l2-background-hover); } &.disabled { @@ -47,7 +44,8 @@ } &:hover { - background-color: var(--bg-cherry-100); + background-color: var(--danger-background); + color: var(--l1-foreground); } } @@ -74,73 +72,24 
@@ } .context-menu-header { - padding-bottom: 4px; - border-bottom: 1px solid var(--l1-border); + padding: 8px 12px; + border-bottom: 1px solid var(--l2-border); color: var(--muted-foreground); } -// Target the popover inner specifically for context menu .context-menu .ant-popover-inner { - padding: 12px 8px !important; - // max-height: 254px !important; - max-width: 300px !important; + padding: 0; + border-radius: 6px; + max-width: 300px; + background: var(--l2-background) !important; + border: 1px solid var(--l2-border) !important; } -// Dark mode support -.darkMode { - .context-menu-item { - color: var(--muted-foreground); - - &:hover, - &:focus { - background-color: var(--l2-background); - } - - &.danger { - color: var(--bg-cherry-400); - - .icon { - color: var(--bg-cherry-400); - } - - &:hover { - background-color: var(--danger-background); - color: var(--l1-foreground); - } - } - - .icon { - color: var(--bg-robin-400); - } - } - - .context-menu-header { - border-bottom: 1px solid var(--l1-border); - color: var(--muted-foreground); - } - - // Set the menu popover background - .context-menu .ant-popover-inner { - background: var(--l1-background) !important; - border: 1px solid var(--border) !important; - } -} - -// Context menu backdrop overlay .context-menu-backdrop { position: fixed; - top: 0; - left: 0; - width: 100vw; - height: 100vh; + inset: 0; z-index: 9999; background: transparent; cursor: default; - - // Prevent any pointer events from reaching elements behind pointer-events: auto; - - // Ensure it covers the entire viewport including any scrollable areas - position: fixed !important; - inset: 0; } From a672335a33bd263d63c38aa0bc5c9b1ce27889b8 Mon Sep 17 00:00:00 2001 From: Piyush Singariya Date: Wed, 29 Apr 2026 14:20:28 +0530 Subject: [PATCH 19/19] fix: Body Search warning with FTS in JSON Logs (#10807) * fix: fts warning miss in direct text search * fix: comments * test: added one more test variation * ci: go lint * fix: fts warning update * fix: 
integration tests * fix: go test and fmtlint --- pkg/querybuilder/constants.go | 4 ++ pkg/querybuilder/where_clause_visitor.go | 8 ++++ pkg/telemetrylogs/stmt_builder_test.go | 45 +++++++++++++------ .../01_logs_json_body_new_qb.py | 12 ++++- 4 files changed, 53 insertions(+), 16 deletions(-) diff --git a/pkg/querybuilder/constants.go b/pkg/querybuilder/constants.go index 7b82b4530a..e427fdf137 100644 --- a/pkg/querybuilder/constants.go +++ b/pkg/querybuilder/constants.go @@ -4,6 +4,10 @@ const ( TrueConditionLiteral = "true" SkipConditionLiteral = "__skip__" ErrorConditionLiteral = "__skip_because_of_error__" + + // BodyFullTextSearchDefaultWarning is emitted when a full-text search or "body" searches are hit + // with New JSON Body enhancements. + BodyFullTextSearchDefaultWarning = "Full text searches default to `body.message:string`. Use `body.` to search a different field inside body" ) var ( diff --git a/pkg/querybuilder/where_clause_visitor.go b/pkg/querybuilder/where_clause_visitor.go index 07feebce0b..89acc73698 100644 --- a/pkg/querybuilder/where_clause_visitor.go +++ b/pkg/querybuilder/where_clause_visitor.go @@ -362,6 +362,10 @@ func (v *filterExpressionVisitor) VisitPrimary(ctx *grammar.PrimaryContext) any v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error())) return ErrorConditionLiteral } + if v.bodyJSONEnabled && v.fullTextColumn.Name == "body" { + v.warnings = append(v.warnings, BodyFullTextSearchDefaultWarning) + } + return cond } @@ -717,6 +721,10 @@ func (v *filterExpressionVisitor) VisitFullText(ctx *grammar.FullTextContext) an return ErrorConditionLiteral } + if v.bodyJSONEnabled && v.fullTextColumn.Name == "body" { + v.warnings = append(v.warnings, BodyFullTextSearchDefaultWarning) + } + return cond } diff --git a/pkg/telemetrylogs/stmt_builder_test.go b/pkg/telemetrylogs/stmt_builder_test.go index 480bb21de7..4a952597bb 100644 --- a/pkg/telemetrylogs/stmt_builder_test.go +++ 
b/pkg/telemetrylogs/stmt_builder_test.go @@ -894,12 +894,12 @@ func TestAdjustKey(t *testing.T) { func TestStmtBuilderBodyField(t *testing.T) { cases := []struct { - name string - requestType qbtypes.RequestType - query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation] + name string + requestType qbtypes.RequestType + query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation] enableUseJSONBody bool - expected qbtypes.Statement - expectedErr error + expected qbtypes.Statement + expectedErr error }{ { name: "body_exists", @@ -1039,15 +1039,15 @@ func TestStmtBuilderBodyField(t *testing.T) { func TestStmtBuilderBodyFullTextSearch(t *testing.T) { cases := []struct { - name string - requestType qbtypes.RequestType - query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation] + name string + requestType qbtypes.RequestType + query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation] enableUseJSONBody bool - expected qbtypes.Statement - expectedErr error + expected qbtypes.Statement + expectedErr error }{ { - name: "body_contains", + name: "fts", requestType: qbtypes.RequestTypeRaw, query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{ Signal: telemetrytypes.SignalLogs, @@ -1056,13 +1056,30 @@ func TestStmtBuilderBodyFullTextSearch(t *testing.T) { }, enableUseJSONBody: true, expected: qbtypes.Statement{ - Query: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE match(LOWER(body_v2.message), LOWER(?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? 
LIMIT ?", - Args: []any{"error", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10}, + Query: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE match(LOWER(body_v2.message), LOWER(?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?", + Args: []any{"error", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10}, + Warnings: []string{querybuilder.BodyFullTextSearchDefaultWarning}, }, expectedErr: nil, }, { - name: "body_contains_disabled", + name: "fts_2", + requestType: qbtypes.RequestTypeRaw, + query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{ + Signal: telemetrytypes.SignalLogs, + Filter: &qbtypes.Filter{Expression: "error"}, + Limit: 10, + }, + enableUseJSONBody: true, + expected: qbtypes.Statement{ + Query: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE match(LOWER(body_v2.message), LOWER(?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? 
LIMIT ?", + Args: []any{"error", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10}, + Warnings: []string{querybuilder.BodyFullTextSearchDefaultWarning}, + }, + expectedErr: nil, + }, + { + name: "fts_disabled", requestType: qbtypes.RequestTypeRaw, query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{ Signal: telemetrytypes.SignalLogs, diff --git a/tests/integration/tests/querier_json_body/01_logs_json_body_new_qb.py b/tests/integration/tests/querier_json_body/01_logs_json_body_new_qb.py index 8f893b8a5a..db7bafbdd3 100644 --- a/tests/integration/tests/querier_json_body/01_logs_json_body_new_qb.py +++ b/tests/integration/tests/querier_json_body/01_logs_json_body_new_qb.py @@ -1212,13 +1212,21 @@ def test_message_searches( "aggregation": "count()", "validate": lambda r: len(get_rows(r)) == 2 and set(_body_messages(r)) == payment_messages, }, - # FTS — bare keyword + # FTS — String bare keyword { "name": "msg.fts_quoted", "requestType": "raw", "expression": '"Payment"', "aggregation": "count()", - "validate": lambda r: len(get_rows(r)) == 2 and all("Payment" in b.get("message", "") for b in _get_bodies(r)), + "validate": lambda r: len(get_rows(r)) == 2 and all("Payment" in b.get("message", "") for b in _get_bodies(r)) and r.json().get("data", {}).get("warning") is not None, + }, + # FTS — bare keyword + { + "name": "msg.fts_quoted_without_quotes", + "requestType": "raw", + "expression": "Payment", + "aggregation": "count()", + "validate": lambda r: len(get_rows(r)) == 2 and all("Payment" in b.get("message", "") for b in _get_bodies(r)) and r.json().get("data", {}).get("warning") is not None, }, # = operator via body.message — tests exact match path {