Compare commits


114 Commits

Author SHA1 Message Date
Piyush Singariya
8c47266094 chore: fmt 2026-04-27 19:41:20 +05:30
Piyush Singariya
672b0e79c3 fix: changes based on review 2026-04-27 19:37:00 +05:30
Piyush Singariya
d008e9dbc3 fix: py-lint 2026-04-27 18:09:01 +05:30
Piyush Singariya
06909ef1fe fix: minor fix 2026-04-27 17:56:56 +05:30
Piyush Singariya
750ae898e5 Merge branch 'main' into feat/json-qb-tests 2026-04-27 17:52:31 +05:30
Piyush Singariya
1411f472f4 chore: py-fmt 2026-04-27 17:45:34 +05:30
Piyush Singariya
1c6e355b47 chore: rename jsontypeexporter.py 2026-04-27 17:38:59 +05:30
Piyush Singariya
b73eb93cda fix: reuse querier fixtures 2026-04-27 17:35:36 +05:30
Piyush Singariya
655a657584 feat: check query log as fixture 2026-04-27 17:13:53 +05:30
Piyush Singariya
04905bcdf3 Merge branch 'main' into feat/json-qb-tests 2026-04-27 15:41:48 +05:30
Piyush Singariya
ec6c6348e3 Merge branch 'feat/json-qb-tests-go-only' into feat/json-qb-tests 2026-04-27 11:08:18 +05:30
Piyush Singariya
2c66fce0ca revert: mcp related changes 2026-04-27 11:06:43 +05:30
Piyush Singariya
4d79ff89c8 refactor(telemetrytypes): replace JSONDataTypeIndex with TelemetryFieldKeySkipIndex 2026-04-27 10:57:22 +05:30
Piyush Singariya
495f76cad4 fix: shift test files 2026-04-24 14:07:04 +05:30
Piyush Singariya
8cf780bc10 Merge remote-tracking branch 'origin/main' into feat/json-qb-tests 2026-04-24 11:18:42 +05:30
Piyush Singariya
e39e93b01a Merge branch 'main' into feat/json-qb-tests 2026-04-22 14:52:29 +05:30
Piyush Singariya
8fbc490673 ci: polluted test fix 2026-04-22 14:52:16 +05:30
Piyush Singariya
88c53c456d fix: tests 2026-04-22 13:42:22 +05:30
Piyush Singariya
8cda4644fb Merge branch 'main' into feat/json-qb-tests 2026-04-22 13:26:00 +05:30
Piyush Singariya
c3a73c6626 fix: index needs body prefix 2026-04-22 13:25:20 +05:30
Piyush Singariya
6be50ec7bc ci: fix go tests 2026-04-22 13:23:35 +05:30
Piyush Singariya
98cefa1975 fix: openapi ci 2026-04-22 13:23:23 +05:30
Piyush Singariya
43bbde1887 Merge branch 'main' into feat/json-qb-tests 2026-04-22 12:38:55 +05:30
Piyush Singariya
d91c870fb5 fix: change based on review 2026-04-22 12:38:12 +05:30
Piyush Singariya
5a441d3367 fix: change based on review 2026-04-22 12:07:13 +05:30
Piyush Singariya
ddf0975cf6 chore: comment removal 2026-04-21 17:41:40 +05:30
Piyush Singariya
3f8a5af14c test: enhanced tests to work with backtick required key 2026-04-21 16:10:59 +05:30
Piyush Singariya
65ce61c0dd Merge branch 'main' into feat/json-qb-tests 2026-04-21 15:33:59 +05:30
Piyush Singariya
1350dd8226 chore: some changes in test to improve quality 2026-04-21 15:33:45 +05:30
Piyush Singariya
14ecd0f8db chore: go mod tidy 2026-04-21 13:35:09 +05:30
Piyush Singariya
7eccbc96b6 chore: error fix and unused code 2026-04-21 13:33:18 +05:30
Piyush Singariya
0a595e1b71 chore: update migrator version to stable release 2026-04-21 13:30:12 +05:30
Piyush Singariya
e8ed7592e6 chore: update collector to stable release 2026-04-21 13:28:54 +05:30
Piyush Singariya
eee7788108 fix: replacing JSONIndex with TelemetryFieldKeyIndex 2026-04-21 13:25:06 +05:30
Piyush Singariya
0a37aa0bd1 Merge branch 'main' into feat/json-qb-tests 2026-04-21 10:59:26 +05:30
Piyush Singariya
7d7307a6eb Merge branch 'chore/json-changes' into feat/json-qb-tests 2026-04-16 11:50:14 +05:30
Piyush Singariya
91beeb4949 fix: int64 mapping 2026-04-16 11:49:40 +05:30
Piyush Singariya
9f8c3466b1 test: select orderby works 2026-04-16 11:49:15 +05:30
Piyush Singariya
701323c822 Merge branch 'chore/json-changes' into feat/json-qb-tests 2026-04-15 18:18:00 +05:30
Piyush Singariya
cc193c9be5 fix: issue with FuzzyMatching and API failing 2026-04-15 18:17:44 +05:30
Piyush Singariya
512dc579b1 ci: test with updated collector 2026-04-15 16:54:18 +05:30
Piyush Singariya
d904953afe Merge branch 'chore/json-changes' into feat/json-qb-tests 2026-04-15 16:08:58 +05:30
Piyush Singariya
9690c06cb2 fix: comment removed 2026-04-15 16:08:44 +05:30
Piyush Singariya
8c44925a67 fix: branches should tell missing array types 2026-04-15 16:08:14 +05:30
Piyush Singariya
1ea35a19d7 fix: update jsontypeexporter 2026-04-15 15:59:46 +05:30
Piyush Singariya
c27db82794 test: select order by tests added 2026-04-15 15:35:19 +05:30
Piyush Singariya
5d9685ad9e test: added indexed tests separately 2026-04-15 15:08:32 +05:30
Piyush Singariya
8d0cc2d99d Merge branch 'chore/json-changes' into feat/json-qb-tests 2026-04-15 14:52:49 +05:30
Piyush Singariya
9643fae27a fix: invalid index usage on terminal condition 2026-04-15 14:51:24 +05:30
Piyush Singariya
750394a73f Merge branch 'chore/json-changes' into feat/json-qb-tests 2026-04-15 14:18:53 +05:30
Piyush Singariya
19a65e80d8 Merge branch 'main' into chore/json-changes 2026-04-15 14:18:32 +05:30
Piyush Singariya
bbef04352e Merge branch 'chore/json-changes' into feat/json-qb-tests 2026-04-15 14:17:04 +05:30
Piyush Singariya
0758ff133b fix: json access plan removal of AvailableTypes 2026-04-15 14:16:23 +05:30
Piyush Singariya
74dceb844b test: integration test fix attempt 1 2026-04-15 13:54:30 +05:30
Piyush Singariya
0d0ebe8fe7 Merge branch 'main' into feat/json-qb-tests 2026-04-15 13:07:34 +05:30
Piyush Singariya
b35a6213a6 fix: remove unused promoted fixture 2026-04-15 13:05:52 +05:30
Piyush Singariya
9cef109158 fix: changes based on review 2026-04-15 13:00:14 +05:30
Piyush Singariya
bb308c263f chore: changes based on review 2026-04-15 11:30:29 +05:30
Piyush Singariya
67e1b82adb fix: name changed to field_map 2026-04-14 17:05:30 +05:30
Piyush Singariya
1e8c0f19f5 fix: added safeguards in plan generation 2026-04-14 16:17:44 +05:30
Piyush Singariya
c027181935 test: selectField tests added 2026-04-14 16:06:12 +05:30
Piyush Singariya
7122fb8b54 revert: remove db binaries 2026-04-14 14:56:46 +05:30
Piyush Singariya
67830c8a16 Merge branch 'main' into chore/json-changes 2026-04-14 14:38:53 +05:30
Piyush Singariya
096eee6435 Merge branch 'upgrade/collector@v0.144.3-rc.4' into chore/json-changes 2026-04-14 14:26:31 +05:30
Piyush Singariya
30f5f2f2f2 ci: test fix 2026-04-14 13:34:49 +05:30
Piyush Singariya
52adb84461 Merge branch 'upgrade/collector@v0.144.3-rc.4' into chore/json-changes 2026-04-14 13:20:29 +05:30
Piyush Singariya
63b7f15d0e fix: tests 2026-04-14 13:19:30 +05:30
Piyush Singariya
6d3c88ed21 chore: upgrade collector version v0.144.3-rc.4 2026-04-14 13:07:43 +05:30
Piyush Singariya
fc6a67aec1 Merge branch 'upgrade/collector@v0.144.3-rc.2' into chore/json-changes 2026-04-14 12:58:26 +05:30
Piyush Singariya
9f41499047 fix: go sum 2026-04-14 12:43:58 +05:30
Piyush Singariya
9cd554d293 fix: qbtoexpr tests 2026-04-14 12:43:07 +05:30
Piyush Singariya
415387edfc fix: upgraded collector version 2026-04-14 12:40:37 +05:30
Piyush Singariya
fa1bc3db9b ci: tests fixed 2026-04-14 11:49:35 +05:30
Piyush Singariya
6c6dad8a66 chore: remove redundant comparison 2026-04-13 17:18:16 +05:30
Piyush Singariya
7403f86773 fix: tons of changes 2026-04-13 13:44:57 +05:30
Piyush Singariya
9176ef0589 Merge branch 'main' into feat/json-qb-tests 2026-04-10 19:59:46 +05:30
Piyush Singariya
5c2a338189 test: updated validation 2026-04-10 19:59:29 +05:30
Piyush Singariya
704bab23cf Merge branch 'main' into feat/json-qb-tests 2026-04-09 18:04:25 +05:30
Piyush Singariya
371da26b3c fix: test 2026-04-09 18:03:57 +05:30
Piyush Singariya
97fbfbdc13 fix: type ambiguity 2026-04-09 13:58:13 +05:30
Piyush Singariya
4b112988ef Merge branch 'main' into feat/json-qb-tests 2026-04-09 12:19:26 +05:30
Piyush Singariya
a47ecf3907 test: with higher migrator version 2026-04-09 12:16:31 +05:30
Piyush Singariya
e4a78cf556 fix: dynamically change insert stmt for body_v2 availability 2026-04-08 22:00:05 +05:30
Piyush Singariya
b6adecc294 fix: better validations 2026-04-08 19:29:28 +05:30
Piyush Singariya
40333a5fee Merge branch 'main' into feat/json-qb-tests 2026-04-08 19:08:34 +05:30
Piyush Singariya
4af6a9abae Merge branch 'main' into feat/json-qb-tests 2026-04-07 21:49:21 +05:30
Piyush Singariya
55e892dad3 fix: body tests ready 2026-04-07 16:22:26 +05:30
Piyush Singariya
181116308f fix: logs.py 2026-04-07 15:56:21 +05:30
Piyush Singariya
eaa678910b fix: better tests 2026-04-07 14:49:52 +05:30
Piyush Singariya
e994caeb02 chore: import tests from older pr 2026-04-07 13:43:16 +05:30
Piyush Singariya
10840f8495 ci: lint changes 2026-04-07 12:31:22 +05:30
Piyush Singariya
1fcd3adfc8 Merge branch 'main' into fix/array-json 2026-04-07 12:29:35 +05:30
Piyush Singariya
3e14b26b00 fix: negative operator check 2026-04-07 12:26:44 +05:30
Piyush Singariya
b30bfa6371 fix: better review for test file 2026-04-02 12:53:44 +05:30
Piyush Singariya
e7f4a04b36 Merge branch 'main' into fix/array-json 2026-04-01 15:44:01 +05:30
Piyush Singariya
0687634da3 fix: stringified integer value input 2026-04-01 15:25:35 +05:30
Piyush Singariya
7e7732243e fix: dynamic array tests 2026-04-01 12:54:26 +05:30
Piyush Singariya
2f952e402f feat: change filtering of dynamic arrays 2026-04-01 12:09:32 +05:30
Piyush Singariya
a12febca4a fix: array json element comparison 2026-04-01 10:34:55 +05:30
Piyush Singariya
cb71c9c3f7 Merge branch 'main' into fix/array-json 2026-03-31 15:42:09 +05:30
Piyush Singariya
1cd4ce6509 Merge branch 'main' into fix/array-json 2026-03-31 14:55:36 +05:30
Piyush Singariya
9299c8ab18 fix: indexed unit tests 2026-03-30 15:47:47 +05:30
Piyush Singariya
24749de269 fix: comment 2026-03-30 15:16:28 +05:30
Piyush Singariya
39098ec3f4 fix: unit tests 2026-03-30 15:12:17 +05:30
Piyush Singariya
fe554f5c94 fix: remove not used paths from testdata 2026-03-30 14:24:48 +05:30
Piyush Singariya
8a60a041a6 fix: unit tests 2026-03-30 14:14:49 +05:30
Piyush Singariya
541f19c34a fix: array type filtering from dynamic arrays 2026-03-30 12:59:31 +05:30
Piyush Singariya
010db03d6e fix: indexed tests passing 2026-03-30 12:24:26 +05:30
Piyush Singariya
5408acbd8c fix: primitive conditions working 2026-03-30 12:01:35 +05:30
Piyush Singariya
0de6c85f81 feat: align negative operators to include other logs 2026-03-28 10:30:11 +05:30
Piyush Singariya
69ec24fa05 test: fix unit tests 2026-03-27 15:12:49 +05:30
Piyush Singariya
539d732b65 fix: contextual path index usage 2026-03-27 14:44:51 +05:30
Piyush Singariya
843d5fb199 Merge branch 'main' into feat/json-index 2026-03-27 14:17:52 +05:30
Piyush Singariya
fabdfb8cc1 feat: enable JSON Path index 2026-03-27 14:07:37 +05:30
29 changed files with 2493 additions and 107 deletions

View File

@@ -51,6 +51,7 @@ jobs:
- role
- rootuser
- serviceaccount
- querier_json_body
- ttl
sqlstore-provider:
- postgres
@@ -61,7 +62,7 @@ jobs:
- 25.5.6
- 25.12.5
schema-migrator-version:
- v0.142.0
- v0.144.3
postgres-version:
- 15
if: |

View File

@@ -1,5 +0,0 @@
{
"$schema": "https://unpkg.com/knip@5/schema.json",
"project": ["src/**/*.ts", "src/**/*.tsx"],
"ignore": ["src/api/generated/**/*.ts"]
}

View File

@@ -27,7 +27,6 @@ const interestedInOptions: Record<string, string> = {
singleTool:
'Single Tool (logs, metrics & traces) to reduce operational overhead',
correlateSignals: 'Correlate signals for faster troubleshooting',
openSourceTooling: 'Prefer open-source tooling',
};
export function AboutSigNozQuestions({

View File

@@ -0,0 +1,22 @@
import ROUTES from 'constants/routes';
import CreateAlertChannels from 'container/CreateAlertChannels';
import { ChannelType } from 'container/CreateAlertChannels/config';
import GeneralSettings from 'container/GeneralSettings';
import { t } from 'i18next';
export const alertsRoutesConfig = [
{
Component: GeneralSettings,
name: t('routes.general'),
route: ROUTES.SETTINGS,
key: ROUTES.SETTINGS,
},
{
Component: (): JSX.Element => (
<CreateAlertChannels preType={ChannelType.Slack} />
),
name: t('routes.alert_channels'),
route: ROUTES.CHANNELS_NEW,
key: ROUTES.CHANNELS_NEW,
},
];

View File

@@ -0,0 +1,19 @@
import { useLocation } from 'react-router-dom';
import RouteTab from 'components/RouteTab';
import history from 'lib/history';
import { alertsRoutesConfig } from './config';
function SettingsPage(): JSX.Element {
const { pathname } = useLocation();
return (
<RouteTab
history={history}
routes={alertsRoutesConfig}
activeKey={pathname}
/>
);
}
export default SettingsPage;

View File

@@ -0,0 +1,54 @@
import { useMemo } from 'react';
import { Color } from '@signozhq/design-tokens';
import { CircleCheck, Siren } from 'lucide-react';
import { getDurationFromNow } from 'utils/timeUtils';
import { AlertStatusProps, StatusConfig } from './types';
import './AlertStatus.styles.scss';
export default function AlertStatus({
status,
timestamp,
}: AlertStatusProps): JSX.Element {
const statusConfig: StatusConfig = useMemo(
() => ({
firing: {
icon: <Siren size={14} color={Color.TEXT_VANILLA_400} />,
text: 'Firing since',
extraInfo: timestamp ? (
<>
<div></div>
<div className="time">{getDurationFromNow(timestamp)}</div>
</>
) : null,
className: 'alert-status-info--firing',
},
resolved: {
icon: (
<CircleCheck
size={14}
fill={Color.BG_VANILLA_400}
color={Color.BG_INK_400}
/>
),
text: 'Resolved',
extraInfo: null,
className: 'alert-status-info--resolved',
},
}),
[timestamp],
);
const currentStatus = statusConfig[status];
return (
<div className={`alert-status-info ${currentStatus.className}`}>
<div className="alert-status-info__icon">{currentStatus.icon}</div>
<div className="alert-status-info__details">
<div className="text">{currentStatus.text}</div>
{currentStatus.extraInfo}
</div>
</div>
);
}

View File

@@ -0,0 +1,18 @@
export type AlertStatusProps =
| { status: 'firing'; timestamp: number }
| { status: 'resolved'; timestamp?: number };
export type StatusConfig = {
firing: {
icon: JSX.Element;
text: string;
extraInfo: JSX.Element | null;
className: string;
};
resolved: {
icon: JSX.Element;
text: string;
extraInfo: JSX.Element | null;
className: string;
};
};

View File

@@ -0,0 +1,3 @@
import AlertHistory from 'container/AlertHistory';
export default AlertHistory;

View File

@@ -0,0 +1,86 @@
import {
FiltersType,
IQuickFiltersConfig,
} from 'components/QuickFilters/types';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { DataSource } from 'types/common/queryBuilder';
export const ExceptionsQuickFiltersConfig: IQuickFiltersConfig[] = [
{
type: FiltersType.CHECKBOX,
title: 'Environment',
dataSource: DataSource.TRACES,
attributeKey: {
key: 'deployment.environment',
dataType: DataTypes.String,
type: 'resource',
},
defaultOpen: true,
},
{
type: FiltersType.CHECKBOX,
title: 'Service Name',
dataSource: DataSource.TRACES,
attributeKey: {
key: 'service.name',
dataType: DataTypes.String,
type: 'resource',
},
defaultOpen: false,
},
{
type: FiltersType.CHECKBOX,
title: 'Hostname',
dataSource: DataSource.TRACES,
attributeKey: {
key: 'host.name',
dataType: DataTypes.String,
type: 'resource',
},
defaultOpen: false,
},
{
type: FiltersType.CHECKBOX,
title: 'K8s Cluster Name',
dataSource: DataSource.TRACES,
attributeKey: {
key: 'k8s.cluster.name',
dataType: DataTypes.String,
type: 'resource',
},
defaultOpen: false,
},
{
type: FiltersType.CHECKBOX,
title: 'K8s Deployment Name',
dataSource: DataSource.TRACES,
attributeKey: {
key: 'k8s.deployment.name',
dataType: DataTypes.String,
type: 'resource',
},
defaultOpen: false,
},
{
type: FiltersType.CHECKBOX,
title: 'K8s Namespace Name',
dataSource: DataSource.TRACES,
attributeKey: {
key: 'k8s.namespace.name',
dataType: DataTypes.String,
type: 'resource',
},
defaultOpen: false,
},
{
type: FiltersType.CHECKBOX,
title: 'K8s Pod Name',
dataSource: DataSource.TRACES,
attributeKey: {
key: 'k8s.pod.name',
dataType: DataTypes.String,
type: 'resource',
},
defaultOpen: false,
},
];

View File

@@ -0,0 +1,13 @@
import BillingContainer from 'container/BillingContainer/BillingContainer';
import './BillingPage.styles.scss';
function BillingPage(): JSX.Element {
return (
<div className="billingPageContainer">
<BillingContainer />
</div>
);
}
export default BillingPage;

View File

@@ -0,0 +1,3 @@
import BillingPage from './BillingPage';
export default BillingPage;

View File

@@ -0,0 +1,18 @@
import { Typography } from 'antd';
import styled from 'styled-components';
export const Title = styled(Typography)`
&&& {
margin-top: 1rem;
margin-bottom: 1rem;
}
`;
export const ButtonContainer = styled.div`
&&& {
display: flex;
justify-content: flex-end;
align-items: center;
margin-top: 1rem;
}
`;

View File

@@ -0,0 +1,3 @@
import HomePage from './HomePage';
export default HomePage;

View File

@@ -0,0 +1,14 @@
import { Col } from 'antd';
import { themeColors } from 'constants/theme';
import styled from 'styled-components';
export const WrapperStyled = styled.div`
display: flex;
flex-direction: column;
flex: 1;
color: ${themeColors.lightWhite};
`;
export const ButtonWrapperStyled = styled(Col)`
margin-left: auto;
`;

View File

@@ -0,0 +1,7 @@
export const removeSourcePageFromPath = (path: string): string => {
const lastSlashIndex = path.lastIndexOf('/');
if (lastSlashIndex !== -1) {
return path.substring(0, lastSlashIndex);
}
return path;
};

View File

@@ -0,0 +1,6 @@
import MySettingsContainer from 'container/MySettings';
function MySettings(): JSX.Element {
return <MySettingsContainer />;
}
export default MySettings;

View File

@@ -0,0 +1,25 @@
import { Button, Typography } from 'antd';
import SomethingWentWrongAsset from 'assets/SomethingWentWrong';
import { Container } from 'components/NotFound/styles';
import ROUTES from 'constants/routes';
import history from 'lib/history';
function SomethingWentWrong(): JSX.Element {
return (
<Container>
<SomethingWentWrongAsset />
<Typography.Title level={3}>Oops! Something went wrong</Typography.Title>
<Button
type="primary"
onClick={(): void => {
history.push(ROUTES.HOME);
}}
className="periscope-btn primary"
>
Return to Home
</Button>
</Container>
);
}
export default SomethingWentWrong;

View File

@@ -0,0 +1,10 @@
import styled from 'styled-components';
export const Container = styled.div`
margin: 1rem 0;
`;
export const ActionsWrapper = styled.div`
display: flex;
justify-content: flex-end;
`;

View File

@@ -11,7 +11,9 @@ import (
"github.com/uptrace/bun"
)
var ErrCodeInvalidPlannedMaintenancePayload = errors.MustNewCode("invalid_planned_maintenance_payload")
var (
ErrCodeInvalidPlannedMaintenancePayload = errors.MustNewCode("invalid_planned_maintenance_payload")
)
type MaintenanceStatus struct {
valuer.String
@@ -61,15 +63,15 @@ type StorablePlannedMaintenance struct {
}
type PlannedMaintenance struct {
ID valuer.UUID `json:"id" required:"true"`
Name string `json:"name" required:"true"`
Description string `json:"description"`
Schedule *Schedule `json:"schedule" required:"true"`
RuleIDs []string `json:"alertIds"`
CreatedAt time.Time `json:"createdAt"`
CreatedBy string `json:"createdBy"`
UpdatedAt time.Time `json:"updatedAt"`
UpdatedBy string `json:"updatedBy"`
ID valuer.UUID `json:"id" required:"true"`
Name string `json:"name" required:"true"`
Description string `json:"description"`
Schedule *Schedule `json:"schedule" required:"true"`
RuleIDs []string `json:"alertIds"`
CreatedAt time.Time `json:"createdAt"`
CreatedBy string `json:"createdBy"`
UpdatedAt time.Time `json:"updatedAt"`
UpdatedBy string `json:"updatedBy"`
Status MaintenanceStatus `json:"status" required:"true"`
Kind MaintenanceKind `json:"kind" required:"true"`
}
@@ -159,11 +161,8 @@ func (m *PlannedMaintenance) ShouldSkip(ruleID string, now time.Time) bool {
currentTime := now.In(loc)
// fixed schedule — only when no recurrence is configured.
// When recurrence is set, the recurring check below handles everything;
// falling through here would cause the window to match the absolute
// StartTime-EndTime range instead of the daily/weekly/monthly pattern.
if m.Schedule.Recurrence == nil && !m.Schedule.StartTime.IsZero() && !m.Schedule.EndTime.IsZero() {
// fixed schedule
if !m.Schedule.StartTime.IsZero() && !m.Schedule.EndTime.IsZero() {
startTime := m.Schedule.StartTime.In(loc)
endTime := m.Schedule.EndTime.In(loc)
if currentTime.Equal(startTime) || currentTime.Equal(endTime) ||
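
To restate the rule in the comment above: a minimal Python sketch with hypothetical attribute names (the authoritative logic is the Go code in this hunk):

def in_fixed_window(schedule, now):
    # A configured recurrence takes precedence; the recurring check
    # later in the Go code handles those cases entirely.
    if schedule.recurrence is not None:
        return False
    # The absolute window applies only when both endpoints are set.
    if schedule.start_time is None or schedule.end_time is None:
        return False
    return schedule.start_time <= now <= schedule.end_time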

View File

@@ -13,6 +13,7 @@ func timePtr(t time.Time) *time.Time {
}
func TestShouldSkipMaintenance(t *testing.T) {
cases := []struct {
name string
maintenance *PlannedMaintenance
@@ -498,7 +499,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
},
},
},
ts: time.Date(2024, 4, 1, 12, 10, 0, 0, time.UTC),
ts: time.Date(2024, 04, 1, 12, 10, 0, 0, time.UTC),
skip: true,
},
{
@@ -507,14 +508,14 @@ func TestShouldSkipMaintenance(t *testing.T) {
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 4, 1, 12, 0, 0, 0, time.UTC),
StartTime: time.Date(2024, 04, 01, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday},
},
},
},
ts: time.Date(2024, 4, 15, 12, 10, 0, 0, time.UTC),
ts: time.Date(2024, 04, 15, 12, 10, 0, 0, time.UTC),
skip: true,
},
{
@@ -523,14 +524,14 @@ func TestShouldSkipMaintenance(t *testing.T) {
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 4, 1, 12, 0, 0, 0, time.UTC),
StartTime: time.Date(2024, 04, 01, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday},
},
},
},
ts: time.Date(2024, 4, 14, 12, 10, 0, 0, time.UTC), // 14th 04 is sunday
ts: time.Date(2024, 04, 14, 12, 10, 0, 0, time.UTC), // 14th 04 is sunday
skip: false,
},
{
@@ -539,14 +540,14 @@ func TestShouldSkipMaintenance(t *testing.T) {
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 4, 1, 12, 0, 0, 0, time.UTC),
StartTime: time.Date(2024, 04, 01, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday},
},
},
},
ts: time.Date(2024, 4, 16, 12, 10, 0, 0, time.UTC), // 16th 04 is tuesday
ts: time.Date(2024, 04, 16, 12, 10, 0, 0, time.UTC), // 16th 04 is tuesday
skip: false,
},
{
@@ -555,14 +556,14 @@ func TestShouldSkipMaintenance(t *testing.T) {
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 4, 1, 12, 0, 0, 0, time.UTC),
StartTime: time.Date(2024, 04, 01, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday},
},
},
},
ts: time.Date(2024, 5, 6, 12, 10, 0, 0, time.UTC),
ts: time.Date(2024, 05, 06, 12, 10, 0, 0, time.UTC),
skip: true,
},
{
@@ -571,14 +572,14 @@ func TestShouldSkipMaintenance(t *testing.T) {
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 4, 1, 12, 0, 0, 0, time.UTC),
StartTime: time.Date(2024, 04, 01, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday},
},
},
},
ts: time.Date(2024, 5, 6, 14, 0, 0, 0, time.UTC),
ts: time.Date(2024, 05, 06, 14, 00, 0, 0, time.UTC),
skip: true,
},
{
@@ -587,13 +588,13 @@ func TestShouldSkipMaintenance(t *testing.T) {
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 4, 4, 12, 0, 0, 0, time.UTC),
StartTime: time.Date(2024, 04, 04, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeMonthly,
},
},
},
ts: time.Date(2024, 4, 4, 12, 10, 0, 0, time.UTC),
ts: time.Date(2024, 04, 04, 12, 10, 0, 0, time.UTC),
skip: true,
},
{
@@ -602,13 +603,13 @@ func TestShouldSkipMaintenance(t *testing.T) {
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 4, 4, 12, 0, 0, 0, time.UTC),
StartTime: time.Date(2024, 04, 04, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeMonthly,
},
},
},
ts: time.Date(2024, 4, 4, 14, 10, 0, 0, time.UTC),
ts: time.Date(2024, 04, 04, 14, 10, 0, 0, time.UTC),
skip: false,
},
{
@@ -617,52 +618,13 @@ func TestShouldSkipMaintenance(t *testing.T) {
Schedule: &Schedule{
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 4, 4, 12, 0, 0, 0, time.UTC),
StartTime: time.Date(2024, 04, 04, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeMonthly,
},
},
},
ts: time.Date(2024, 5, 4, 12, 10, 0, 0, time.UTC),
skip: true,
},
// The recurrence should govern, when set. Not the fixed range.
{
name: "recurring-daily-with-fixed-times-outside-daily-window",
maintenance: &PlannedMaintenance{
Schedule: &Schedule{
Timezone: "UTC",
// These fixed fields should be ignored when Recurrence is set.
StartTime: time.Date(2026, 4, 1, 10, 0, 0, 0, time.UTC),
EndTime: time.Date(2026, 4, 30, 18, 0, 0, 0, time.UTC),
Recurrence: &Recurrence{
StartTime: time.Date(2026, 4, 1, 14, 0, 0, 0, time.UTC), // daily at 14:00
Duration: valuer.MustParseTextDuration("2h"), // until 16:00
RepeatType: RepeatTypeDaily,
},
},
},
// 11:00 is inside the fixed range but outside the daily 14:00-16:00 window.
// Before the fix this returned true (bug); after fix it returns false.
ts: time.Date(2026, 4, 15, 11, 0, 0, 0, time.UTC),
skip: false,
},
{
name: "recurring-daily-with-fixed-times-inside-daily-window",
maintenance: &PlannedMaintenance{
Schedule: &Schedule{
Timezone: "UTC",
StartTime: time.Date(2026, 4, 1, 10, 0, 0, 0, time.UTC),
EndTime: time.Date(2026, 4, 30, 18, 0, 0, 0, time.UTC),
Recurrence: &Recurrence{
StartTime: time.Date(2026, 4, 1, 14, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeDaily,
},
},
},
// 15:00 is inside the daily 14:00-16:00 window — should skip.
ts: time.Date(2026, 4, 15, 15, 0, 0, 0, time.UTC),
ts: time.Date(2024, 05, 04, 12, 10, 0, 0, time.UTC),
skip: true,
},
}

View File

@@ -23,6 +23,7 @@ pytest_plugins = [
"fixtures.notification_channel",
"fixtures.alerts",
"fixtures.cloudintegrations",
"fixtures.jsontypes",
"fixtures.seeder",
]
@@ -79,6 +80,6 @@ def pytest_addoption(parser: pytest.Parser):
parser.addoption(
"--schema-migrator-version",
action="store",
default="v0.144.2",
default="v0.144.3",
help="schema migrator version",
)

View File

@@ -1,5 +1,6 @@
import os
from collections.abc import Generator
from collections.abc import Callable, Generator
from datetime import datetime
from typing import Any
import clickhouse_connect
@@ -262,3 +263,76 @@ def clickhouse(
delete=delete,
restore=restore,
)
@pytest.fixture(name="check_query_log")
def check_query_log(
signoz: types.SigNoz,
) -> Callable[..., None]:
"""
Returns a callable that flushes system.query_log and asserts that at
least one recent SELECT satisfies check_fn.
Args:
after_ts: Only consider queries logged after this timestamp.
case_name: Label used in assertion failure messages.
check_fn: Predicate run against each candidate query string.
tables: Filter to queries that touched all of these tables, as
'db.table' strings (uses hasAll(tables, [...])).
must_contain: Substrings that must appear in the query text (AND-ed).
must_not_contain: Substrings that must not appear in the query text (AND-ed).
limit: How many most-recent queries to examine (default 10).
Usage:
before = datetime.now(tz=timezone.utc)
# ... trigger the query under test ...
check_query_log(
before, "my.case",
lambda q: "assumeNotNull" in q,
tables=["signoz_logs.distributed_logs_v2"],
)
"""
def _check(
after_ts: datetime,
case_name: str,
check_fn: Callable[[str], bool],
*,
tables: list[str] | None = None,
must_contain: list[str] | None = None,
must_not_contain: list[str] | None = None,
limit: int = 10,
) -> None:
conn = signoz.telemetrystore.conn
conn.command("SYSTEM FLUSH LOGS")
# Use millisecond precision to avoid timestamp collisions between
# adjacent test cases (second-level precision causes bleed-through).
params: dict = {"after_ms": int(after_ts.timestamp() * 1000)}
conditions = [
"type = 'QueryFinish'",
"query_kind = 'Select'",
"toUnixTimestamp64Milli(event_time_microseconds) >= %(after_ms)s",
]
if tables:
params["tables"] = tables
conditions.append("hasAll(tables, %(tables)s)")
for i, pattern in enumerate(must_contain or []):
key = f"mc_{i}"
params[key] = pattern
conditions.append(f"position(query, %({key})s) > 0")
for i, pattern in enumerate(must_not_contain or []):
key = f"mnc_{i}"
params[key] = pattern
conditions.append(f"position(query, %({key})s) = 0")
where = " AND ".join(conditions)
result = conn.query(
f"SELECT query FROM system.query_log WHERE {where} ORDER BY event_time_microseconds DESC LIMIT {limit}",
parameters=params,
)
queries = [row[0] for row in result.result_rows]
assert queries, f"No matching SELECT in system.query_log for case '{case_name}'"
assert all(check_fn(q) for q in queries), f"query_log check failed for case '{case_name}'.\n" + "Queries:\n" + "\n---\n".join(queries)
return _check
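
A hedged end-to-end sketch of using this fixture in a test; the test name and the triggering step are placeholders, not part of the diff:

from datetime import datetime, timezone

def test_json_filter_hits_body_v2(check_query_log):
    before = datetime.now(tz=timezone.utc)
    # ... trigger the query under test, e.g. via the query-range API ...
    check_query_log(
        before,
        "json.filter.body_v2",
        lambda q: "body_v2" in q,               # predicate on each logged SELECT
        tables=["signoz_logs.distributed_logs_v2"],
        must_contain=["body_v2"],               # substrings are AND-ed
        limit=5,
    )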

tests/fixtures/jsontypes.py (new file, 441 lines)
View File

@@ -0,0 +1,441 @@
"""
A simplified version of metadataexporter that exports JSON path types for test fixtures.
This exports JSON type metadata to the path_types table by parsing JSON bodies
and extracting all paths with their types, similar to how the real metadataexporter works.
"""
import datetime
import json
from abc import ABC
from collections.abc import Callable, Generator
from http import HTTPStatus
from typing import (
Any,
)
import numpy as np
import pytest
import requests
from fixtures import types
class JSONPathType(ABC):
"""Represents a JSON path with its type information"""
field_name: str
field_data_type: str
last_seen: np.uint64
signal: str = "logs"
field_context: str = "body"
def __init__(
self,
field_name: str,
field_data_type: str,
last_seen: datetime.datetime | None = None,
) -> None:
self.field_name = field_name
self.field_data_type = field_data_type
self.signal = "logs"
self.field_context = "body"
if last_seen is None:
last_seen = datetime.datetime.now()
self.last_seen = np.uint64(int(last_seen.timestamp() * 1e9))
def np_arr(self) -> np.ndarray:
"""Return path type data as numpy array for database insertion"""
return np.array([self.signal, self.field_context, self.field_name, self.field_data_type, self.last_seen])
# Constants matching metadataexporter
ARRAY_SEPARATOR = "[]." # Used in paths like "education[].name"
ARRAY_SUFFIX = "[]" # Used when traversing into array element objects
def _infer_array_type_from_type_strings(types: list[str]) -> str | None:
"""
Infer array type from a list of pre-classified type strings.
Matches metadataexporter's inferArrayMask logic.
Internal type strings are: "JSON", "String", "Bool", "Float64", "Int64"
SuperTyping rules (matching Go inferArrayMask):
- JSON alone → []json
- JSON + any primitive → []dynamic
- String alone → []string; String + other → []dynamic
- Float64 wins over Int64 and Bool
- Int64 wins over Bool
- Bool alone → []bool
"""
if len(types) == 0:
return None
unique = set(types)
has_json = "JSON" in unique
# hasPrimitive mirrors Go: (hasJSON && len(unique) > 1) || (!hasJSON && len(unique) > 0)
has_primitive = (has_json and len(unique) > 1) or (not has_json and len(unique) > 0)
if has_json:
if not has_primitive:
return "[]json"
return "[]dynamic"
# ---- Primitive Type Resolution (Float > Int > Bool) ----
if "String" in unique:
if len(unique) > 1:
return "[]dynamic"
return "[]string"
if "Float64" in unique:
return "[]float64"
if "Int64" in unique:
return "[]int64"
if "Bool" in unique:
return "[]bool"
return "[]dynamic"
def _infer_array_type(elements: list[Any]) -> str | None:
"""
Infer array type from raw Python list elements.
Classifies each element then delegates to _infer_array_type_from_type_strings.
"""
if len(elements) == 0:
return None
types = []
for elem in elements:
if elem is None:
continue
if isinstance(elem, dict):
types.append("JSON")
elif isinstance(elem, str):
types.append("String")
elif isinstance(elem, bool): # must be before int (bool is subclass of int)
types.append("Bool")
elif isinstance(elem, float):
types.append("Float64")
elif isinstance(elem, int):
types.append("Int64")
return _infer_array_type_from_type_strings(types)
def _python_type_to_clickhouse_type(value: Any) -> str:
"""
Convert Python type to ClickHouse JSON type string.
Maps Python types to ClickHouse JSON data types.
Matches metadataexporter's mapPCommonValueTypeToDataType.
"""
if isinstance(value, bool):
return "bool"
elif isinstance(value, int):
return "int64"
elif isinstance(value, float):
return "float64"
elif isinstance(value, str):
return "string"
elif isinstance(value, list):
# Use the sophisticated array type inference
array_type = _infer_array_type(value)
return array_type if array_type else "[]dynamic"
elif isinstance(value, dict):
return "json"
else:
return "string" # Default fallback
def _extract_json_paths(
obj: Any,
current_path: str = "",
path_types: dict[str, set[str]] | None = None,
level: int = 0,
) -> dict[str, set[str]]:
"""
Recursively extract all paths and their types from a JSON object.
Matches metadataexporter's analyzePValue logic.
Args:
obj: The JSON object to traverse
current_path: Current path being built (e.g., "user.name")
path_types: Dictionary mapping paths to sets of types found
level: Current nesting level (for depth limiting)
Returns:
Dictionary mapping paths to sets of type strings
"""
if path_types is None:
path_types = {}
if obj is None:
# Skip null values — matches Go walkNode which errors on ValueTypeEmpty
return path_types
if isinstance(obj, dict):
# For objects, recurse into keys without recording the object itself as a type.
# Matches Go walkMap which recurses without calling ta.record on the map node.
for key, value in obj.items():
# Build the path for this key
if current_path:
new_path = f"{current_path}.{key}"
else:
new_path = key
# Recurse into the value
_extract_json_paths(value, new_path, path_types, level + 1)
elif isinstance(obj, list):
# Skip empty arrays
if len(obj) == 0:
return path_types
# Collect types from array elements (matching Go: types := make([]pcommon.ValueType, 0, s.Len()))
types = []
for item in obj:
if isinstance(item, dict):
# When traversing into array element objects, use ArraySuffix ([])
# This matches: prefix+ArraySuffix in the Go code
# Example: if current_path is "education", we use "education[]" to traverse into objects
array_prefix = current_path + ARRAY_SUFFIX if current_path else ""
for key, value in item.items():
if array_prefix:
# Use array separator: education[].name
array_path = f"{array_prefix}.{key}"
else:
array_path = key
# Recurse without increasing level (matching Go behavior)
_extract_json_paths(value, array_path, path_types, level)
types.append("JSON")
elif isinstance(item, list):
# Arrays inside arrays are not supported - skip the whole path
# Matching Go: e.logger.Error("arrays inside arrays are not supported!", ...); return nil
return path_types
elif isinstance(item, str):
types.append("String")
elif isinstance(item, bool):
types.append("Bool")
elif isinstance(item, float):
types.append("Float64")
elif isinstance(item, int):
types.append("Int64")
# Infer array type from collected types (matching Go: if mask := inferArrayMask(types); mask != 0)
if len(types) > 0:
array_type = _infer_array_type_from_type_strings(types)
if array_type and current_path:
if current_path not in path_types:
path_types[current_path] = set()
path_types[current_path].add(array_type)
# Primitive value (string, number, bool)
elif current_path:
if current_path not in path_types:
path_types[current_path] = set()
obj_type = _python_type_to_clickhouse_type(obj)
path_types[current_path].add(obj_type)
return path_types
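# Worked example (editor's illustration): for a body like
#   {"user": {"name": "alice", "age": 25},
#    "education": [{"school": "x"}, {"school": "y"}]}
# _extract_json_paths returns
#   {"user.name": {"string"}, "user.age": {"int64"},
#    "education": {"[]json"}, "education[].school": {"string"}}
# i.e. the array itself is typed []json and its element objects are
# traversed under the education[] prefix.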
def _parse_json_bodies_and_extract_paths(
json_bodies: list[str],
timestamp: datetime.datetime | None = None,
) -> list[JSONPathType]:
"""
Parse JSON bodies and extract all paths with their types.
This mimics the behavior of metadataexporter.
Args:
json_bodies: List of JSON body strings to parse
timestamp: Timestamp to use for last_seen (defaults to now)
Returns:
List of JSONPathType objects with all discovered paths and types
"""
if timestamp is None:
timestamp = datetime.datetime.now()
# Aggregate all paths and their types across all JSON bodies
all_path_types: dict[str, set[str]] = {}
for json_body in json_bodies:
try:
parsed = json.loads(json_body)
_extract_json_paths(parsed, "", all_path_types, level=0)
except (json.JSONDecodeError, TypeError):
# Skip invalid JSON
continue
# Convert to list of JSONPathType objects
# Each path can have multiple types, so we create one JSONPathType per type
path_type_objects: list[JSONPathType] = []
for path, types_set in all_path_types.items():
for type_str in types_set:
path_type_objects.append(JSONPathType(field_name=path, field_data_type=type_str, last_seen=timestamp))
return path_type_objects
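# Note (editor's illustration): a path seen with conflicting types across
# bodies yields one JSONPathType per type, e.g.
#   _parse_json_bodies_and_extract_paths(['{"a": 1}', '{"a": "x"}'])
# produces two objects for "a": one with "int64" and one with "string".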
@pytest.fixture(name="export_json_types", scope="function")
def export_json_types(
clickhouse: types.TestContainerClickhouse,
) -> Generator[Callable[[list[JSONPathType] | list[str] | list[Any]], None], Any]:
"""
Fixture for exporting JSON type metadata to the path_types table.
This is a simpler version of metadataexporter for test fixtures.
The function can accept:
1. List of JSONPathType objects (manual specification)
2. List of JSON body strings (auto-extract paths)
3. List of Logs objects (extract from body_json field)
Usage examples:
# Manual specification
export_json_types([
JSONPathType(field_name="user.name", field_data_type="string"),
JSONPathType(field_name="user.age", field_data_type="int64"),
])
# Auto-extract from JSON strings
export_json_types([
'{"user": {"name": "alice", "age": 25}}',
'{"user": {"name": "bob", "age": 30}}',
])
# Auto-extract from Logs objects
export_json_types(logs_list)
"""
def _export_json_types(
data: list[JSONPathType] | list[str] | list[Any], # List[Logs] but avoiding circular import
) -> None:
"""
Export JSON type metadata to signoz_metadata.distributed_field_keys table.
This table stores signal, context, path, and type information for body JSON fields.
"""
path_types: list[JSONPathType] = []
if len(data) == 0:
return
# Determine input type and convert to JSONPathType list
first_item = data[0]
if isinstance(first_item, JSONPathType):
# Already JSONPathType objects
path_types = data # type: ignore
elif isinstance(first_item, str):
# List of JSON strings - parse and extract paths
path_types = _parse_json_bodies_and_extract_paths(data) # type: ignore
else:
# Assume it's a list of Logs objects - extract body_v2
json_bodies: list[str] = []
for log in data: # type: ignore
# Try to get body_v2 attribute
if hasattr(log, "body_v2") and log.body_v2:
json_bodies.append(log.body_v2)
elif hasattr(log, "body") and log.body:
# Fallback to body if body_v2 not available
try:
# Try to parse as JSON
json.loads(log.body)
json_bodies.append(log.body)
except (json.JSONDecodeError, TypeError):
pass
if json_bodies:
path_types = _parse_json_bodies_and_extract_paths(json_bodies)
if len(path_types) == 0:
return
clickhouse.conn.insert(
database="signoz_metadata",
table="distributed_field_keys",
data=[path_type.np_arr() for path_type in path_types],
column_names=[
"signal",
"field_context",
"field_name",
"field_data_type",
"last_seen",
],
)
yield _export_json_types
# Cleanup - truncate the local table after tests (following pattern from logs fixture)
clickhouse.conn.query(f"TRUNCATE TABLE signoz_metadata.field_keys ON CLUSTER '{clickhouse.env['SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER']}' SYNC")
@pytest.fixture(name="create_json_index", scope="function")
def create_json_index(
signoz: types.SigNoz,
) -> Generator[Callable[[str, list[dict[str, Any]]], None]]:
"""
Create ClickHouse data-skipping indexes on body_v2 JSON sub-columns via
POST /api/v1/logs/promote_paths.
**Must be called BEFORE insert_logs** so that newly inserted data parts are
covered by the index and the QB uses the indexed condition path.
Each entry in `paths` follows the PromotePath API shape:
{
"path": "body.user.name", # must start with "body."
"indexes": [
{
"fieldDataType": "string", # string | int64 | float64
"type": "ngrambf_v1(3, 256, 2, 0)", # or "minmax", "tokenbf_v1(...)"
"granularity": 1,
}
],
}
Teardown drops every index created during the test by querying
system.data_skipping_indices for matching expressions.
Example::
def test_foo(signoz, get_token, insert_logs, export_json_types, create_json_index):
token = get_token(...)
export_json_types(logs_list)
create_json_index(token, [
{"path": "body.user.name",
"indexes": [{"fieldDataType": "string", "type": "ngrambf_v1(3, 256, 2, 0)", "granularity": 1}]},
{"path": "body.user.age",
"indexes": [{"fieldDataType": "int64", "type": "minmax", "granularity": 1}]},
])
insert_logs(logs_list) # data inserted after index exists — index is built automatically
"""
created_paths: list[str] = []
def _create_json_body_index(token: str, paths: list[dict[str, Any]]) -> None:
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/logs/promote_paths"),
headers={"authorization": f"Bearer {token}"},
json=paths,
timeout=30,
)
assert response.status_code == HTTPStatus.CREATED, f"Failed to create JSON body indexes: {response.status_code} {response.text}"
for path in paths:
# The API strips the "body." prefix before storing — mirror that here
# so our cleanup query uses the bare path (e.g. "user.name").
raw = path["path"].removeprefix("body.")
if raw not in created_paths:
created_paths.append(raw)
yield _create_json_body_index
if not created_paths:
return
cluster = signoz.telemetrystore.env["SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER"]
for path in created_paths:
result = signoz.telemetrystore.conn.query(f"SELECT name FROM system.data_skipping_indices WHERE database = 'signoz_logs' AND table = 'logs_v2' AND expr LIKE '%{path}%'")
for (index_name,) in result.result_rows:
signoz.telemetrystore.conn.query(f"ALTER TABLE signoz_logs.logs_v2 ON CLUSTER '{cluster}' DROP INDEX IF EXISTS `{index_name}`")

View File

@@ -121,6 +121,8 @@ class Logs(ABC):
resources: dict[str, Any] = {},
attributes: dict[str, Any] = {},
body: str = "default body",
body_v2: str | None = None,
body_promoted: str | None = None,
severity_text: str = "INFO",
trace_id: str = "",
span_id: str = "",
@@ -166,6 +168,33 @@ class Logs(ABC):
# Set body
self.body = body
# Set body_v2 - if body is JSON, parse and stringify it, otherwise use empty string
# ClickHouse accepts String input for JSON column
if body_v2 is not None:
self.body_v2 = body_v2
else:
# Try to parse body as JSON; if successful use it directly,
# otherwise wrap as {"message": body} matching the normalize operator behavior.
try:
json.loads(body)
self.body_v2 = body
except (json.JSONDecodeError, TypeError):
self.body_v2 = json.dumps({"message": body})
# Set body_promoted - must be valid JSON
# Tests will explicitly pass promoted column's content, but we validate it
if body_promoted is not None:
# Validate that it's valid JSON
try:
json.loads(body_promoted)
self.body_promoted = body_promoted
except (json.JSONDecodeError, TypeError):
# If invalid, default to empty JSON object
self.body_promoted = "{}"
else:
# Default to empty JSON object (valid JSON)
self.body_promoted = "{}"
# Process resources and attributes
self.resources_string = {k: str(v) for k, v in resources.items()}
self.resource_json = {} if resource_write_mode == "legacy_only" else dict(self.resources_string)
@@ -309,6 +338,8 @@ class Logs(ABC):
self.severity_text,
self.severity_number,
self.body,
self.body_v2,
self.body_promoted,
self.attributes_string,
self.attributes_number,
self.attributes_bool,
@@ -436,31 +467,47 @@ def insert_logs_to_clickhouse(conn, logs: list[Logs]) -> None:
data=[resource_key.np_arr() for resource_key in resource_keys],
)
all_column_names = [
"ts_bucket_start",
"resource_fingerprint",
"timestamp",
"observed_timestamp",
"id",
"trace_id",
"span_id",
"trace_flags",
"severity_text",
"severity_number",
"body",
"body_v2",
"body_promoted",
"attributes_string",
"attributes_number",
"attributes_bool",
"resources_string",
"scope_name",
"scope_version",
"scope_string",
"resource",
]
result = conn.query("SELECT count() FROM system.columns WHERE database = 'signoz_logs' AND table = 'logs_v2' AND name = 'body_v2'")
has_json_body = result.result_rows[0][0] > 0
if has_json_body:
column_names = all_column_names
data = [log.np_arr() for log in logs]
else:
json_body_cols = {"body_v2", "body_promoted"}
keep_indices = [i for i, c in enumerate(all_column_names) if c not in json_body_cols]
column_names = [all_column_names[i] for i in keep_indices]
data = [log.np_arr()[keep_indices] for log in logs]
conn.insert(
database="signoz_logs",
table="distributed_logs_v2",
data=[log.np_arr() for log in logs],
column_names=[
"ts_bucket_start",
"resource_fingerprint",
"timestamp",
"observed_timestamp",
"id",
"trace_id",
"span_id",
"trace_flags",
"severity_text",
"severity_number",
"body",
"attributes_string",
"attributes_number",
"attributes_bool",
"resources_string",
"scope_name",
"scope_version",
"scope_string",
"resource",
],
data=data,
column_names=column_names,
)
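
The body_v2 defaulting above follows the normalize-operator convention: valid JSON passes through, anything else is wrapped in a message object. A standalone sketch of that rule (hypothetical helper, not part of the diff):

import json

def normalize_body_v2(body: str) -> str:
    """Pass JSON bodies through; wrap plain text as {"message": body}."""
    try:
        json.loads(body)
        return body
    except (json.JSONDecodeError, TypeError):
        return json.dumps({"message": body})

assert normalize_body_v2('{"a": 1}') == '{"a": 1}'
assert normalize_body_v2("plain text") == '{"message": "plain text"}'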

View File

@@ -8,27 +8,32 @@ from fixtures.logger import setup_logger
logger = setup_logger(__name__)
@pytest.fixture(name="migrator", scope="package")
def migrator(
def create_migrator(
network: Network,
clickhouse: types.TestContainerClickhouse,
request: pytest.FixtureRequest,
pytestconfig: pytest.Config,
cache_key: str = "migrator",
env_overrides: dict | None = None,
) -> types.Operation:
"""
Package-scoped fixture for running schema migrations.
Factory function for running schema migrations.
Accepts optional env_overrides to customize the migrator environment.
"""
def create() -> None:
version = request.config.getoption("--schema-migrator-version")
client = docker.from_env()
environment = dict(env_overrides) if env_overrides else {}
container = client.containers.run(
image=f"signoz/signoz-schema-migrator:{version}",
command=f"sync --replication=true --cluster-name=cluster --up= --dsn={clickhouse.env['SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN']}",
detach=True,
auto_remove=False,
network=network.id,
environment=environment,
)
result = container.wait()
@@ -47,6 +52,7 @@ def migrator(
detach=True,
auto_remove=False,
network=network.id,
environment=environment,
)
result = container.wait()
@@ -59,7 +65,7 @@ def migrator(
container.remove()
return types.Operation(name="migrator")
return types.Operation(name=cache_key)
def delete(_: types.Operation) -> None:
pass
@@ -70,9 +76,27 @@ def migrator(
return reuse.wrap(
request,
pytestconfig,
"migrator",
cache_key,
lambda: types.Operation(name=""),
create,
delete,
restore,
)
@pytest.fixture(name="migrator", scope="package")
def migrator(
network: Network,
clickhouse: types.TestContainerClickhouse,
request: pytest.FixtureRequest,
pytestconfig: pytest.Config,
) -> types.Operation:
"""
Package-scoped fixture for running schema migrations.
"""
return create_migrator(
network=network,
clickhouse=clickhouse,
request=request,
pytestconfig=pytestconfig,
)

View File

@@ -500,6 +500,14 @@ def get_scalar_columns(response_json: dict) -> list[dict]:
return results[0].get("columns", [])
def get_rows(response: requests.Response) -> list[dict[str, Any]]:
assert response.json()["status"] == "success"
results = response.json()["data"]["data"]["results"]
assert len(results) == 1
# The server returns rows:null (not []) when there are 0 matching logs.
return results[0].get("rows") or []
def get_column_data_from_response(response_json: dict, column_name: str) -> list[Any]:
results = response_json.get("data", {}).get("data", {}).get("results", [])
if not results:

File diff suppressed because it is too large.

View File

@@ -0,0 +1,67 @@
import pytest
from testcontainers.core.container import Network
from fixtures import types
from fixtures.migrator import create_migrator
from fixtures.signoz import create_signoz
UNSUPPORTED_CLICKHOUSE_VERSIONS = {"25.5.6"}
def pytest_collection_modifyitems(config: pytest.Config, items: list[pytest.Item]) -> None:
version = config.getoption("--clickhouse-version")
if version in UNSUPPORTED_CLICKHOUSE_VERSIONS:
skip = pytest.mark.skip(reason=f"JSON body QB tests require ClickHouse > {version}")
for item in items:
item.add_marker(skip)
@pytest.fixture(name="migrator", scope="package")
def migrator_json(
network: Network,
clickhouse: types.TestContainerClickhouse,
request: pytest.FixtureRequest,
pytestconfig: pytest.Config,
) -> types.Operation:
"""
Package-scoped migrator with ENABLE_LOGS_MIGRATIONS_V2=1.
"""
return create_migrator(
network=network,
clickhouse=clickhouse,
request=request,
pytestconfig=pytestconfig,
cache_key="migrator-json-body",
env_overrides={
"ENABLE_LOGS_MIGRATIONS_V2": "1",
},
)
@pytest.fixture(name="signoz", scope="package")
def signoz_json_body(
network: Network,
migrator: types.Operation, # pylint: disable=unused-argument
zeus: types.TestContainerDocker,
gateway: types.TestContainerDocker,
sqlstore: types.TestContainerSQL,
clickhouse: types.TestContainerClickhouse,
request: pytest.FixtureRequest,
pytestconfig: pytest.Config,
) -> types.SigNoz:
"""
Package-scoped fixture for SigNoz with BODY_JSON_QUERY_ENABLED=true.
"""
return create_signoz(
network=network,
zeus=zeus,
gateway=gateway,
sqlstore=sqlstore,
clickhouse=clickhouse,
request=request,
pytestconfig=pytestconfig,
cache_key="signoz-json-body",
env_overrides={
"BODY_JSON_QUERY_ENABLED": "true",
},
)