Compare commits

...

36 Commits

Author SHA1 Message Date
Shaheer Kochai
1b48eb81c6 feat: add timezone support to the graphs throughout the app (#6520)
...
2024-12-03 13:56:48 +04:30
ahmadshaheer
504811546e chore: added null check for timezone.value in updateTimezone 2024-12-02 19:40:27 +04:30
ahmadshaheer
e012f10395 chore: overall improvements 2024-11-20 18:42:41 +04:30
ahmadshaheer
676e32ea09 chore: store timezone string in local storage instead of object 2024-11-20 18:20:48 +04:30
ahmadshaheer
28045772b8 fix: get active timezone in timepicker hint 2024-11-20 09:23:52 +04:30
ahmadshaheer
b1120c7d16 fix: if timezone is undefined, fallback to browser time zone 2024-11-20 09:19:55 +04:30
ahmadshaheer
55c9205aad feat: timezone setting functionality in timepicker, timezone picker, and timezone preferences 2024-11-20 09:19:55 +04:30
ahmadshaheer
5b4f423f9f feat: create a context for timezone and handle the timezone configuration 2024-11-20 09:19:16 +04:30
ahmadshaheer
cc376ce6a8 chore: improve timezoneUtils by adding a function to get browser timezone 2024-11-20 09:19:16 +04:30
ahmadshaheer
20e00c597a fix: display the timezone in timepicker hint 'You are at' 2024-11-20 09:18:43 +04:30
ahmadshaheer
ff7da5c05b fix: fix the issue of timezone breaking for browser and utc timezones 2024-11-19 18:42:20 +04:30
ahmadshaheer
14ccadaeb5 fix: don't focus on time picker when timezone is clicked 2024-11-19 13:55:42 +04:30
ahmadshaheer
984f3829dd chore: fix the typo 2024-11-19 13:37:18 +04:30
ahmadshaheer
31a9ead2fc feat: display timezone in timepicker input 2024-11-19 13:34:15 +04:30
ahmadshaheer
65ce8eaf14 chore: change timezone item from div to button 2024-11-19 13:33:14 +04:30
ahmadshaheer
daec491c79 chore: improve timezone utils 2024-11-19 13:29:33 +04:30
ahmadshaheer
49e29567f4 feat: timezone preferences UI 2024-11-19 10:02:00 +04:30
ahmadshaheer
8edd5fe7d6 fix: overall improvement + add searchIndex to timezone 2024-11-18 18:49:22 +04:30
ahmadshaheer
178a3153dd chore: add the selected timezone as url param and close timezone picker on select 2024-11-18 18:38:45 +04:30
ahmadshaheer
e7f1b27a5b feat: add support for esc keypress to close the timezone picker 2024-11-18 18:27:38 +04:30
ahmadshaheer
dbf0f236be feat: time picker hint and timezone picker UI with basic functionality + helper to get timezones 2024-11-18 17:47:48 +04:30
Vikrant Gupta
d93f72f18d chore: use the license v2 key to fill licenses v3 on startup (#6468)
* feat: use the license v2 key to fill licenses v3 on startup

* chore: make the init only if the licenses v2 is present

* chore: address review comments
2024-11-18 17:55:00 +05:30
Shaheer Kochai
a59e7b9dfb feat: add 'create channel' option in channels list and refetch alert channels on opening the channels dropdown (#6416)
* feat: add channel creation option and auto-refresh channels list on dropdown open

* chore: move inline styles to style.ts

* fix: show the prompt to ask admin if the user doesn't have permissions

* fix: display create channel option only if the user has permission

* fix: prevent repeated new alert event logs + log new channel option inside dropdown
2024-11-18 06:30:06 +00:00
Nityananda Gohain
91bbeaf175 fix: remove unwanted trace APIs (#6464) 2024-11-18 10:27:08 +05:30
Yunus M
22e61e1605 [Snyk] Security upgrade alpine from 3.18.6 to 3.20.3 (#6463)
The following vulnerabilities are fixed with an upgrade:
- https://snyk.io/vuln/SNYK-ALPINE318-BUSYBOX-6913411
- https://snyk.io/vuln/SNYK-ALPINE318-BUSYBOX-7249236
- https://snyk.io/vuln/SNYK-ALPINE318-BUSYBOX-7249265
- https://snyk.io/vuln/SNYK-ALPINE318-BUSYBOX-7249419

Co-authored-by: snyk-bot <snyk-bot@snyk.io>
2024-11-17 21:56:15 +05:30
Srikanth Chekuri
656d1c2b1c chore: add missing alert telemetry (#6459) 2024-11-16 19:16:05 +00:00
Srikanth Chekuri
493ae4fd07 chore: add user email to log_comment (#6461) 2024-11-17 00:36:10 +05:30
Srikanth Chekuri
cd1ec561b1 fix: compare op outside bounds for anomaly alert (#6458) 2024-11-16 20:17:34 +05:30
Nityananda Gohain
0acf39a532 feat: support for new enrichment logic in traces (#6438)
* feat: support for new enrichment logic in traces

* fix: default test added

* fix: update func name in links

* Update pkg/query-service/utils/logs_test.go

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>

---------

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>
2024-11-16 15:19:25 +05:30
Nityananda Gohain
35f4eaa23b fix: update logs struct to fix live logs (#6453) 2024-11-15 22:42:16 +05:30
Nityananda Gohain
77c5f17dce feat: support for window based pagination in new trace v4 (#6440)
* feat: support for window based pagination in new trace v4

* fix: update pagination logic

* fix: update comment

* fix: subtract correct length

* fix: revert changes

* fix: add tests for querier

* fix: rename matcher

* fix: handle offset inmemory for list queries in traces

* fix: correct var name

* fix: add max pagination limit for traces
2024-11-15 22:13:28 +05:30
SagarRajput-7
c1478c4e54 feat: removed dashboard uuid in all cases, be it duplicate, empty, or some valid value, while importing JSON (#6448)
* feat: removed dashboard uuid in all cases, be it duplicate, empty, or some valid value, while importing JSON

* feat: added comment to better explain the logic
2024-11-15 19:35:29 +05:30
Yunus M
371224a64a fix: show org onboarding only to cloud customers (#6451) 2024-11-15 19:12:38 +05:30
Yunus M
504bc0d541 feat: ingestion limits - add toggle feature (#6430) 2024-11-15 08:32:31 +00:00
Nityananda Gohain
2faa0c6d4f feat: trace V4 QB (#6407)
* feat: trace V4 QB

* fix: update get column name and remove id

* fix: handle contains and update tests

* fix: remove unwanted step interval calculation

* fix: add test cases

* fix: add tests for static columns in QB

* fix: add more order by tests

* fix: update order by logic
2024-11-13 20:30:01 +05:30
Srikanth Chekuri
969ac5028e chore: add v2 metric writer to pipelines (#6345) 2024-11-13 10:41:28 +00:00
91 changed files with 4332 additions and 1467 deletions

View File

@@ -66,28 +66,6 @@ processors:
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
detectors: [env, system] # include ec2 for AWS, gcp for GCP and azure for Azure.
timeout: 2s
signozspanmetrics/cumulative:
metrics_exporter: clickhousemetricswrite
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 100000
dimensions:
- name: service.namespace
default: default
- name: deployment.environment
default: default
# This is added to ensure the uniqueness of the timeseries
# Otherwise, identical timeseries produced by multiple replicas of
# collectors result in incorrect APM metrics
- name: signoz.collector.id
- name: service.version
- name: browser.platform
- name: browser.mobile
- name: k8s.cluster.name
- name: k8s.node.name
- name: k8s.namespace.name
- name: host.name
- name: host.type
- name: container.name
# memory_limiter:
# # 80% of maximum memory up to 2G
# limit_mib: 1500
@@ -138,6 +116,8 @@ exporters:
enabled: true
clickhousemetricswrite/prometheus:
endpoint: tcp://clickhouse:9000/signoz_metrics
clickhousemetricswritev2:
dsn: tcp://clickhouse:9000/signoz_metrics
# logging: {}
clickhouselogsexporter:
dsn: tcp://clickhouse:9000/signoz_logs
@@ -161,20 +141,20 @@ service:
pipelines:
traces:
receivers: [jaeger, otlp]
processors: [signozspanmetrics/cumulative, signozspanmetrics/delta, batch]
processors: [signozspanmetrics/delta, batch]
exporters: [clickhousetraces]
metrics:
receivers: [otlp]
processors: [batch]
exporters: [clickhousemetricswrite]
metrics/generic:
exporters: [clickhousemetricswrite, clickhousemetricswritev2]
metrics/hostmetrics:
receivers: [hostmetrics]
processors: [resourcedetection, batch]
exporters: [clickhousemetricswrite]
exporters: [clickhousemetricswrite, clickhousemetricswritev2]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [clickhousemetricswrite/prometheus]
exporters: [clickhousemetricswrite/prometheus, clickhousemetricswritev2]
logs:
receivers: [otlp, tcplog/docker]
processors: [batch]

View File

@@ -57,35 +57,11 @@ receivers:
labels:
job_name: otel-collector
processors:
batch:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
signozspanmetrics/cumulative:
metrics_exporter: clickhousemetricswrite
metrics_flush_interval: 60s
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 100000
dimensions:
- name: service.namespace
default: default
- name: deployment.environment
default: default
# This is added to ensure the uniqueness of the timeseries
# Otherwise, identical timeseries produced by multiple replicas of
# collectors result in incorrect APM metrics
- name: signoz.collector.id
- name: service.version
- name: browser.platform
- name: browser.mobile
- name: k8s.cluster.name
- name: k8s.node.name
- name: k8s.namespace.name
- name: host.name
- name: host.type
- name: container.name
# memory_limiter:
# # 80% of maximum memory up to 2G
# limit_mib: 1500
@@ -149,6 +125,8 @@ exporters:
enabled: true
clickhousemetricswrite/prometheus:
endpoint: tcp://clickhouse:9000/signoz_metrics
clickhousemetricswritev2:
dsn: tcp://clickhouse:9000/signoz_metrics
clickhouselogsexporter:
dsn: tcp://clickhouse:9000/signoz_logs
timeout: 10s
@@ -168,20 +146,20 @@ service:
pipelines:
traces:
receivers: [jaeger, otlp]
processors: [signozspanmetrics/cumulative, signozspanmetrics/delta, batch]
processors: [signozspanmetrics/delta, batch]
exporters: [clickhousetraces]
metrics:
receivers: [otlp]
processors: [batch]
exporters: [clickhousemetricswrite]
metrics/generic:
exporters: [clickhousemetricswrite, clickhousemetricswritev2]
metrics/hostmetrics:
receivers: [hostmetrics]
processors: [resourcedetection, batch]
exporters: [clickhousemetricswrite]
exporters: [clickhousemetricswrite, clickhousemetricswritev2]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [clickhousemetricswrite/prometheus]
exporters: [clickhousemetricswrite/prometheus, clickhousemetricswritev2]
logs:
receivers: [otlp, tcplog/docker]
processors: [batch]

View File

@@ -1,5 +1,5 @@
# use a minimal alpine image
FROM alpine:3.18.6
FROM alpine:3.20.3
# Add Maintainer Info
LABEL maintainer="signoz"

View File

@@ -67,6 +67,30 @@ func StartManager(dbType string, db *sqlx.DB, useLicensesV3 bool, features ...ba
repo: &repo,
}
if useLicensesV3 {
// get active license from the db
active, err := m.repo.GetActiveLicense(context.Background())
if err != nil {
return m, err
}
// if we have an active license then need to fetch the complete details
if active != nil {
// fetch the new license structure from control plane
licenseV3, apiError := validate.ValidateLicenseV3(active.Key)
if apiError != nil {
return m, apiError
}
// insert the licenseV3 in sqlite db
apiError = m.repo.InsertLicenseV3(context.Background(), licenseV3)
// if the license already exists move ahead.
if apiError != nil && apiError.Typ != model.ErrorConflict {
return m, apiError
}
}
}
if err := m.start(useLicensesV3, features...); err != nil {
return m, err
}
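
For readers skimming the diff, a rough TypeScript transcription of the backfill flow above — Repo, validateLicenseV3, and isConflict are illustrative stand-ins for the Go types, not actual SigNoz APIs:

interface LicenseV3 { key: string }
interface Repo {
  getActiveLicense(): Promise<{ key: string } | null>;
  insertLicenseV3(license: LicenseV3): Promise<void>;
}

// Sketch of the startup path: fetch the active v2 license, resolve its v3
// shape from the control plane, insert it, and tolerate "already exists".
async function backfillLicenseV3(
  repo: Repo,
  validateLicenseV3: (key: string) => Promise<LicenseV3>,
  isConflict: (e: unknown) => boolean,
): Promise<void> {
  const active = await repo.getActiveLicense();
  if (!active) return; // no v2 license present, nothing to init
  const licenseV3 = await validateLicenseV3(active.key);
  try {
    await repo.insertLicenseV3(licenseV3);
  } catch (e) {
    if (!isConflict(e)) throw e; // a conflict means it already exists; move ahead
  }
}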

View File

@@ -61,6 +61,11 @@ func NewAnomalyRule(
zap.L().Info("creating new AnomalyRule", zap.String("id", id), zap.Any("opts", opts))
if p.RuleCondition.CompareOp == baserules.ValueIsBelow {
target := -1 * *p.RuleCondition.Target
p.RuleCondition.Target = &target
}
baseRule, err := baserules.NewBaseRule(id, p, reader, opts...)
if err != nil {
return nil, err
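
The negation above appears to normalize "value is below" thresholds so downstream code can compare in a single direction; a hedged restatement of the idea in TypeScript (names illustrative, not the actual Go types):

type CompareOp = 'valueIsAbove' | 'valueIsBelow';

// Mirror "below" targets across zero so every bound can be evaluated as an
// upper bound; -1 * target matches the Go change above.
const normalizeTarget = (op: CompareOp, target: number): number =>
  op === 'valueIsBelow' ? -1 * target : target;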

View File

@@ -186,6 +186,7 @@
"@types/webpack-dev-server": "^4.7.2",
"@typescript-eslint/eslint-plugin": "^4.33.0",
"@typescript-eslint/parser": "^4.33.0",
"@vvo/tzdb": "6.149.0",
"autoprefixer": "10.4.19",
"babel-plugin-styled-components": "^1.12.0",
"compression-webpack-plugin": "9.0.0",

View File

@@ -22,6 +22,7 @@ import AppActions from 'types/actions';
import { UPDATE_USER_IS_FETCH } from 'types/actions/app';
import { Organization } from 'types/api/user/getOrganization';
import AppReducer from 'types/reducer/app';
import { isCloudUser } from 'utils/app';
import { routePermission } from 'utils/permission';
import routes, {
@@ -76,6 +77,8 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
const { t } = useTranslation(['common']);
const isCloudUserVal = isCloudUser();
const localStorageUserAuthToken = getInitialUserTokenRefreshToken();
const dispatch = useDispatch<Dispatch<AppActions>>();
@@ -143,6 +146,7 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
const handleRedirectForOrgOnboarding = (key: string): void => {
if (
isLoggedInState &&
isCloudUserVal &&
!isFetchingOrgPreferences &&
!isLoadingOrgUsers &&
!isEmpty(orgUsers?.payload) &&
@@ -158,6 +162,10 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
history.push(ROUTES.ONBOARDING);
}
}
if (!isCloudUserVal && key === 'ONBOARDING') {
history.push(ROUTES.APPLICATION);
}
};
const handleUserLoginIfTokenPresent = async (
@@ -250,7 +258,7 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
const handleRouting = (): void => {
const showOrgOnboarding = shouldShowOnboarding();
if (showOrgOnboarding && !isOnboardingComplete) {
if (showOrgOnboarding && !isOnboardingComplete && isCloudUserVal) {
history.push(ROUTES.ONBOARDING);
} else {
history.push(ROUTES.APPLICATION);

View File

@@ -119,3 +119,42 @@
color: var(--bg-slate-400) !important;
}
}
.date-time-popover-footer {
border-top: 1px solid var(--bg-ink-200);
padding: 8px 14px;
.timezone-container {
&,
.timezone {
font-family: Inter;
font-size: 12px;
line-height: 16px;
letter-spacing: -0.06px;
}
display: flex;
align-items: center;
color: var(--bg-vanilla-400);
gap: 6px;
.timezone {
cursor: pointer;
padding: 0;
color: var(--bg-vanilla-100);
background-color: transparent;
border: none;
}
}
}
.timezone-badge {
display: flex;
align-items: center;
justify-content: center;
padding: 0 4px;
border-radius: 2px;
background: rgba(171, 189, 255, 0.04);
color: var(--bg-vanilla-100);
font-size: 12px;
font-weight: 400;
line-height: 16px;
letter-spacing: -0.06px;
cursor: pointer;
}

View File

@@ -15,11 +15,14 @@ import { isValidTimeFormat } from 'lib/getMinMax';
import { defaultTo, isFunction, noop } from 'lodash-es';
import debounce from 'lodash-es/debounce';
import { CheckCircle, ChevronDown, Clock } from 'lucide-react';
import { useTimezone } from 'providers/Timezone';
import {
ChangeEvent,
Dispatch,
SetStateAction,
useCallback,
useEffect,
useMemo,
useState,
} from 'react';
import { useLocation } from 'react-router-dom';
@@ -28,6 +31,8 @@ import { popupContainer } from 'utils/selectPopupContainer';
import CustomTimePickerPopoverContent from './CustomTimePickerPopoverContent';
const maxAllowedMinTimeInMonths = 6;
type ViewType = 'datetime' | 'timezone';
const DEFAULT_VIEW: ViewType = 'datetime';
interface CustomTimePickerProps {
onSelect: (value: string) => void;
@@ -81,6 +86,25 @@ function CustomTimePicker({
const location = useLocation();
const [isInputFocused, setIsInputFocused] = useState(false);
const [activeView, setActiveView] = useState<ViewType>(DEFAULT_VIEW);
const { timezone, browserTimezone } = useTimezone();
const activeTimezoneOffset = timezone?.offset;
const isTimezoneOverridden = useMemo(
() => timezone?.offset !== browserTimezone.offset,
[timezone, browserTimezone],
);
const handleViewChange = useCallback(
(newView: 'timezone' | 'datetime'): void => {
if (activeView !== newView) {
setActiveView(newView);
}
setOpen(!open);
},
[activeView, open, setOpen],
);
const getSelectedTimeRangeLabel = (
selectedTime: string,
selectedTimeValue: string,
@@ -132,6 +156,7 @@ function CustomTimePicker({
setOpen(newOpen);
if (!newOpen) {
setCustomDTPickerVisible?.(false);
setActiveView('datetime');
}
};
@@ -281,6 +306,8 @@ function CustomTimePicker({
handleGoLive={defaultTo(handleGoLive, noop)}
options={items}
selectedTime={selectedTime}
activeView={activeView}
setActiveView={setActiveView}
/>
) : (
content
@@ -317,12 +344,23 @@ function CustomTimePicker({
)
}
suffix={
<ChevronDown
size={14}
onClick={(): void => {
setOpen(!open);
}}
/>
<>
{!!isTimezoneOverridden && activeTimezoneOffset && (
<div
className="timezone-badge"
onClick={(e): void => {
e.stopPropagation();
handleViewChange('timezone');
}}
>
<span>{activeTimezoneOffset}</span>
</div>
)}
<ChevronDown
size={14}
onClick={(): void => handleViewChange('datetime')}
/>
</>
}
/>
</Popover>
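
A minimal sketch of the consumer pattern this file introduces, assuming the Timezone provider exposes timezone and browserTimezone as used above:

import { useTimezone } from 'providers/Timezone';

// Render the offset badge only when the user has overridden the browser
// timezone — mirrors the isTimezoneOverridden memo above.
function TimezoneBadge(): JSX.Element | null {
	const { timezone, browserTimezone } = useTimezone();
	if (!timezone || timezone.offset === browserTimezone.offset) {
		return null;
	}
	return <div className="timezone-badge">{timezone.offset}</div>;
}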

View File

@@ -1,5 +1,6 @@
import './CustomTimePicker.styles.scss';
import { Color } from '@signozhq/design-tokens';
import { Button } from 'antd';
import cx from 'classnames';
import ROUTES from 'constants/routes';
@@ -9,10 +10,13 @@ import {
Option,
RelativeDurationSuggestionOptions,
} from 'container/TopNav/DateTimeSelectionV2/config';
import { Clock } from 'lucide-react';
import { useTimezone } from 'providers/Timezone';
import { Dispatch, SetStateAction, useMemo } from 'react';
import { useLocation } from 'react-router-dom';
import RangePickerModal from './RangePickerModal';
import TimezonePicker from './TimezonePicker';
interface CustomTimePickerPopoverContentProps {
options: any[];
@@ -26,8 +30,11 @@ interface CustomTimePickerPopoverContentProps {
onSelectHandler: (label: string, value: string) => void;
handleGoLive: () => void;
selectedTime: string;
activeView: 'datetime' | 'timezone';
setActiveView: Dispatch<SetStateAction<'datetime' | 'timezone'>>;
}
// eslint-disable-next-line sonarjs/cognitive-complexity
function CustomTimePickerPopoverContent({
options,
setIsOpen,
@@ -37,12 +44,16 @@ function CustomTimePickerPopoverContent({
onSelectHandler,
handleGoLive,
selectedTime,
activeView,
setActiveView,
}: CustomTimePickerPopoverContentProps): JSX.Element {
const { pathname } = useLocation();
const isLogsExplorerPage = useMemo(() => pathname === ROUTES.LOGS_EXPLORER, [
pathname,
]);
const { timezone } = useTimezone();
const activeTimezoneOffset = timezone?.offset;
function getTimeChips(options: Option[]): JSX.Element {
return (
@@ -63,55 +74,75 @@ function CustomTimePickerPopoverContent({
);
}
return (
<div className="date-time-popover">
<div className="date-time-options">
{isLogsExplorerPage && (
<Button className="data-time-live" type="text" onClick={handleGoLive}>
Live
</Button>
)}
{options.map((option) => (
<Button
type="text"
key={option.label + option.value}
onClick={(): void => {
onSelectHandler(option.label, option.value);
}}
className={cx(
'date-time-options-btn',
customDateTimeVisible
? option.value === 'custom' && 'active'
: selectedTime === option.value && 'active',
)}
return activeView === 'datetime' ? (
<div>
<div className="date-time-popover">
<div className="date-time-options">
{isLogsExplorerPage && (
<Button className="data-time-live" type="text" onClick={handleGoLive}>
Live
</Button>
)}
{options.map((option) => (
<Button
type="text"
key={option.label + option.value}
onClick={(): void => {
onSelectHandler(option.label, option.value);
}}
className={cx(
'date-time-options-btn',
customDateTimeVisible
? option.value === 'custom' && 'active'
: selectedTime === option.value && 'active',
)}
>
{option.label}
</Button>
))}
</div>
<div
className={cx(
'relative-date-time',
selectedTime === 'custom' || customDateTimeVisible
? 'date-picker'
: 'relative-times',
)}
>
{selectedTime === 'custom' || customDateTimeVisible ? (
<RangePickerModal
setCustomDTPickerVisible={setCustomDTPickerVisible}
setIsOpen={setIsOpen}
onCustomDateHandler={onCustomDateHandler}
selectedTime={selectedTime}
/>
) : (
<div className="relative-times-container">
<div className="time-heading">RELATIVE TIMES</div>
<div>{getTimeChips(RelativeDurationSuggestionOptions)}</div>
</div>
)}
</div>
</div>
<div className="date-time-popover-footer">
<div className="timezone-container">
<Clock color={Color.BG_VANILLA_400} height={12} width={12} />
<span className="timezone-text">You are at</span>
<button
type="button"
className="timezone"
onClick={(): void => setActiveView('timezone')}
>
{option.label}
</Button>
))}
</div>
<div
className={cx(
'relative-date-time',
selectedTime === 'custom' || customDateTimeVisible
? 'date-picker'
: 'relative-times',
)}
>
{selectedTime === 'custom' || customDateTimeVisible ? (
<RangePickerModal
setCustomDTPickerVisible={setCustomDTPickerVisible}
setIsOpen={setIsOpen}
onCustomDateHandler={onCustomDateHandler}
selectedTime={selectedTime}
/>
) : (
<div className="relative-times-container">
<div className="time-heading">RELATIVE TIMES</div>
<div>{getTimeChips(RelativeDurationSuggestionOptions)}</div>
</div>
)}
{activeTimezoneOffset}
</button>
</div>
</div>
</div>
) : (
<div className="date-time-popover">
<TimezonePicker setActiveView={setActiveView} setIsOpen={setIsOpen} />
</div>
);
}

View File

@@ -4,6 +4,7 @@ import { DatePicker } from 'antd';
import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal';
import { LexicalContext } from 'container/TopNav/DateTimeSelectionV2/config';
import dayjs, { Dayjs } from 'dayjs';
import { useTimezone } from 'providers/Timezone';
import { Dispatch, SetStateAction } from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
@@ -49,6 +50,8 @@ function RangePickerModal(props: RangePickerModalProps): JSX.Element {
}
onCustomDateHandler(date_time, LexicalContext.CUSTOM_DATE_PICKER);
};
const { timezone } = useTimezone();
return (
<div className="custom-date-picker">
<RangePicker
@@ -58,7 +61,10 @@ function RangePickerModal(props: RangePickerModalProps): JSX.Element {
onOk={onModalOkHandler}
// eslint-disable-next-line react/jsx-props-no-spreading
{...(selectedTime === 'custom' && {
defaultValue: [dayjs(minTime / 1000000), dayjs(maxTime / 1000000)],
defaultValue: [
dayjs(minTime / 1000000).tz(timezone.value),
dayjs(maxTime / 1000000).tz(timezone.value),
],
})}
/>
</div>

View File

@@ -0,0 +1,125 @@
// Variables
$font-family: 'Inter';
$border-color: var(--bg-slate-400);
$item-spacing: 8px;
// Mixins
@mixin text-style-base {
font-family: $font-family;
font-style: normal;
font-weight: 400;
}
@mixin flex-center {
display: flex;
align-items: center;
}
.timezone-picker {
width: 532px;
color: var(--bg-vanilla-400);
font-family: $font-family;
&__search {
@include flex-center;
justify-content: space-between;
padding: 12px 14px;
border-bottom: 1px solid $border-color;
}
&__input-container {
@include flex-center;
gap: 6px;
width: -webkit-fill-available;
}
&__input {
@include text-style-base;
width: 100%;
background: transparent;
border: none;
outline: none;
color: var(--bg-vanilla-100);
font-size: 14px;
line-height: 20px;
letter-spacing: -0.07px;
padding: 0;
&::placeholder {
color: var(--bg-vanilla-400);
}
}
&__esc-key {
@include text-style-base;
font-size: 8px;
color: var(--bg-vanilla-400);
letter-spacing: -0.04px;
border-radius: 2.286px;
border: 1.143px solid var(--bg-ink-200);
border-bottom-width: 2.286px;
background: var(--bg-ink-400);
padding: 0 1px;
}
&__list {
max-height: 310px;
overflow-y: auto;
}
&__item {
@include flex-center;
justify-content: space-between;
padding: 7.5px 6px 7.5px $item-spacing;
margin: 4px $item-spacing;
cursor: pointer;
background: transparent;
border: none;
width: -webkit-fill-available;
color: var(--bg-vanilla-400);
font-family: $font-family;
&:hover,
&.selected {
border-radius: 2px;
background: rgba(171, 189, 255, 0.04);
color: var(--bg-vanilla-100);
}
&.has-divider {
position: relative;
&::after {
content: '';
position: absolute;
bottom: -2px;
left: -$item-spacing;
right: -$item-spacing;
border-bottom: 1px solid $border-color;
}
}
}
&__name {
@include text-style-base;
font-size: 14px;
line-height: 20px;
letter-spacing: -0.07px;
}
&__offset {
color: var(--bg-vanilla-100);
font-size: 12px;
line-height: 16px;
letter-spacing: -0.06px;
}
}
.timezone-name-wrapper {
@include flex-center;
gap: 6px;
&__selected-icon {
height: 15px;
width: 15px;
}
}

View File

@@ -0,0 +1,156 @@
import './TimezonePicker.styles.scss';
import { Color } from '@signozhq/design-tokens';
import cx from 'classnames';
import { TimezonePickerShortcuts } from 'constants/shortcuts/TimezonePickerShortcuts';
import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys';
import { Check, Search } from 'lucide-react';
import { useTimezone } from 'providers/Timezone';
import {
Dispatch,
SetStateAction,
useCallback,
useEffect,
useState,
} from 'react';
import { Timezone, TIMEZONE_DATA } from './timezoneUtils';
interface SearchBarProps {
value: string;
onChange: (value: string) => void;
}
interface TimezoneItemProps {
timezone: Timezone;
isSelected?: boolean;
onClick?: () => void;
}
const ICON_SIZE = 14;
function SearchBar({ value, onChange }: SearchBarProps): JSX.Element {
return (
<div className="timezone-picker__search">
<div className="timezone-picker__input-container">
<Search color={Color.BG_VANILLA_400} height={ICON_SIZE} width={ICON_SIZE} />
<input
type="text"
className="timezone-picker__input"
placeholder="Search timezones..."
value={value}
onChange={(e): void => onChange(e.target.value)}
/>
</div>
<kbd className="timezone-picker__esc-key">esc</kbd>
</div>
);
}
function TimezoneItem({
timezone,
isSelected = false,
onClick,
}: TimezoneItemProps): JSX.Element {
return (
<button
type="button"
className={cx('timezone-picker__item', {
selected: isSelected,
'has-divider': timezone.hasDivider,
})}
onClick={onClick}
>
<div className="timezone-name-wrapper">
<div className="timezone-name-wrapper__selected-icon">
{isSelected && (
<Check
color={Color.BG_VANILLA_100}
height={ICON_SIZE}
width={ICON_SIZE}
/>
)}
</div>
<div className="timezone-picker__name">{timezone.name}</div>
</div>
<div className="timezone-picker__offset">{timezone.offset}</div>
</button>
);
}
TimezoneItem.defaultProps = {
isSelected: false,
onClick: undefined,
};
interface TimezonePickerProps {
setActiveView: Dispatch<SetStateAction<'datetime' | 'timezone'>>;
setIsOpen: Dispatch<SetStateAction<boolean>>;
}
function TimezonePicker({
setActiveView,
setIsOpen,
}: TimezonePickerProps): JSX.Element {
const [searchTerm, setSearchTerm] = useState('');
const { timezone, updateTimezone } = useTimezone();
const [selectedTimezone, setSelectedTimezone] = useState<string>(
timezone?.name ?? TIMEZONE_DATA[0].name,
);
const getFilteredTimezones = useCallback((searchTerm: string): Timezone[] => {
const normalizedSearch = searchTerm.toLowerCase();
return TIMEZONE_DATA.filter(
(tz) =>
tz.name.toLowerCase().includes(normalizedSearch) ||
tz.offset.toLowerCase().includes(normalizedSearch) ||
tz.searchIndex.toLowerCase().includes(normalizedSearch),
);
}, []);
const handleCloseTimezonePicker = useCallback(() => {
setActiveView('datetime');
}, [setActiveView]);
const handleTimezoneSelect = useCallback(
(timezone: Timezone) => {
setSelectedTimezone(timezone.name);
updateTimezone(timezone);
handleCloseTimezonePicker();
setIsOpen(false);
},
[handleCloseTimezonePicker, setIsOpen, updateTimezone],
);
// Register keyboard shortcuts
const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys();
useEffect(() => {
registerShortcut(
TimezonePickerShortcuts.CloseTimezonePicker,
handleCloseTimezonePicker,
);
return (): void => {
deregisterShortcut(TimezonePickerShortcuts.CloseTimezonePicker);
};
}, [deregisterShortcut, handleCloseTimezonePicker, registerShortcut]);
return (
<div className="timezone-picker">
<SearchBar value={searchTerm} onChange={setSearchTerm} />
<div className="timezone-picker__list">
{getFilteredTimezones(searchTerm).map((timezone) => (
<TimezoneItem
key={timezone.value}
timezone={timezone}
isSelected={timezone.name === selectedTimezone}
onClick={(): void => handleTimezoneSelect(timezone)}
/>
))}
</div>
</div>
);
}
export default TimezonePicker;
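
To illustrate the matching above: a query hits on the display name, the formatted offset, or the compact searchIndex. A standalone version of the predicate inside getFilteredTimezones:

import { Timezone } from 'components/CustomTimePicker/timezoneUtils';

// Standalone equivalent of the filter used by getFilteredTimezones.
const matchesQuery = (tz: Timezone, query: string): boolean => {
	const q = query.toLowerCase();
	return (
		tz.name.toLowerCase().includes(q) ||
		tz.offset.toLowerCase().includes(q) ||
		tz.searchIndex.toLowerCase().includes(q)
	);
};

// 'utc+5' matches via searchIndex ('UTC+5:30') even though the displayed
// offset ('UTC + 5:30') contains spaces.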

View File

@@ -0,0 +1,142 @@
import { getTimeZones } from '@vvo/tzdb';
import dayjs from 'dayjs';
import timezone from 'dayjs/plugin/timezone';
import utc from 'dayjs/plugin/utc';
dayjs.extend(utc);
dayjs.extend(timezone);
export interface Timezone {
name: string;
value: string;
offset: string;
searchIndex: string;
hasDivider?: boolean;
}
// Constants
const TIMEZONE_TYPES = {
BROWSER: 'BROWSER',
UTC: 'UTC',
STANDARD: 'STANDARD',
} as const;
type TimezoneType = typeof TIMEZONE_TYPES[keyof typeof TIMEZONE_TYPES];
const UTC_TIMEZONE: Timezone = {
name: 'Coordinated Universal Time — UTC, GMT',
value: 'UTC',
offset: 'UTC',
searchIndex: 'UTC',
hasDivider: true,
};
// Helper functions
const isValidTimezone = (tzName: string): boolean => {
try {
dayjs.tz(dayjs(), tzName);
return true;
} catch {
return false;
}
};
const formatOffset = (offsetMinutes: number): string => {
if (offsetMinutes === 0) return 'UTC';
const hours = Math.floor(Math.abs(offsetMinutes) / 60);
const minutes = Math.abs(offsetMinutes) % 60;
const sign = offsetMinutes > 0 ? '+' : '-';
return `UTC ${sign} ${hours}${
minutes ? `:${minutes.toString().padStart(2, '0')}` : ':00'
}`;
};
const createTimezoneEntry = (
name: string,
offsetMinutes: number,
type: TimezoneType = TIMEZONE_TYPES.STANDARD,
hasDivider = false,
): Timezone => {
const offset = formatOffset(offsetMinutes);
let value = name;
let displayName = name;
switch (type) {
case TIMEZONE_TYPES.BROWSER:
displayName = `Browser time — ${name}`;
value = name;
break;
case TIMEZONE_TYPES.UTC:
displayName = 'Coordinated Universal Time — UTC, GMT';
value = 'UTC';
break;
case TIMEZONE_TYPES.STANDARD:
displayName = name;
value = name;
break;
default:
console.error(`Invalid timezone type: ${type}`);
}
return {
name: displayName,
value,
offset,
searchIndex: offset.replace(/ /g, ''),
...(hasDivider && { hasDivider }),
};
};
const getOffsetByTimezone = (timezone: string): number => {
const dayjsTimezone = dayjs().tz(timezone);
return dayjsTimezone.utcOffset();
};
export const getBrowserTimezone = (): Timezone => {
const browserTz = dayjs.tz.guess();
const browserOffset = getOffsetByTimezone(browserTz);
return createTimezoneEntry(browserTz, browserOffset, TIMEZONE_TYPES.BROWSER);
};
const filterAndSortTimezones = (
allTimezones: ReturnType<typeof getTimeZones>,
browserTzName?: string,
): Timezone[] =>
allTimezones
.filter(
(tz) =>
!tz.name.startsWith('Etc/') &&
isValidTimezone(tz.name) &&
tz.name !== browserTzName,
)
.sort((a, b) => a.name.localeCompare(b.name))
.map((tz) => createTimezoneEntry(tz.name, tz.rawOffsetInMinutes));
const generateTimezoneData = (): Timezone[] => {
const allTimezones = getTimeZones();
const timezones: Timezone[] = [];
// Add browser timezone
const browserTzObject = getBrowserTimezone();
timezones.push(browserTzObject);
// Add UTC timezone with divider
timezones.push(UTC_TIMEZONE);
// Add remaining timezones
timezones.push(...filterAndSortTimezones(allTimezones, browserTzObject.value));
return timezones;
};
export const getTimezoneObjectByTimezoneString = (
timezone: string,
): Timezone => {
const utcOffset = getOffsetByTimezone(timezone);
return createTimezoneEntry(timezone, utcOffset);
};
export const TIMEZONE_DATA = generateTimezoneData();
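
A few expected values, derived by reading formatOffset above (hedged — not output captured from the PR):

import { getTimezoneObjectByTimezoneString } from 'components/CustomTimePicker/timezoneUtils';

// formatOffset(0)    -> 'UTC'
// formatOffset(60)   -> 'UTC + 1:00'
// formatOffset(330)  -> 'UTC + 5:30' (searchIndex strips spaces: 'UTC+5:30')
// formatOffset(-330) -> 'UTC - 5:30'
const tz = getTimezoneObjectByTimezoneString('Asia/Kolkata');
console.log(tz.offset, tz.searchIndex); // 'UTC + 5:30' 'UTC+5:30'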

View File

@@ -1,4 +1,5 @@
import {
_adapters,
BarController,
BarElement,
CategoryScale,
@@ -18,8 +19,10 @@ import {
} from 'chart.js';
import annotationPlugin from 'chartjs-plugin-annotation';
import { generateGridTitle } from 'container/GridPanelSwitch/utils';
import dayjs from 'dayjs';
import { useIsDarkMode } from 'hooks/useDarkMode';
import isEqual from 'lodash-es/isEqual';
import { useTimezone } from 'providers/Timezone';
import {
forwardRef,
memo,
@@ -62,6 +65,17 @@ Chart.register(
Tooltip.positioners.custom = TooltipPositionHandler;
// Map of Chart.js time formats to dayjs format strings
const formatMap = {
'HH:mm:ss': 'HH:mm:ss',
'HH:mm': 'HH:mm',
'MM/DD HH:mm': 'MM/DD HH:mm',
'MM/dd HH:mm': 'MM/DD HH:mm',
'MM/DD': 'MM/DD',
'YY-MM': 'YY-MM',
YY: 'YY',
};
const Graph = forwardRef<ToggleGraphProps | undefined, GraphProps>(
(
{
@@ -80,11 +94,13 @@ const Graph = forwardRef<ToggleGraphProps | undefined, GraphProps>(
dragSelectColor,
},
ref,
// eslint-disable-next-line sonarjs/cognitive-complexity
): JSX.Element => {
const nearestDatasetIndex = useRef<null | number>(null);
const chartRef = useRef<HTMLCanvasElement>(null);
const isDarkMode = useIsDarkMode();
const gridTitle = useMemo(() => generateGridTitle(title), [title]);
const { timezone } = useTimezone();
const currentTheme = isDarkMode ? 'dark' : 'light';
const xAxisTimeUnit = useXAxisTimeUnit(data); // Computes the relevant time unit for x axis by analyzing the time stamp data
@@ -112,6 +128,22 @@ const Graph = forwardRef<ToggleGraphProps | undefined, GraphProps>(
return 'rgba(231,233,237,0.8)';
}, [currentTheme]);
// Override Chart.js date adapter to use dayjs with timezone support
useEffect(() => {
_adapters._date.override({
format(time: number | Date, fmt: string) {
const dayjsTime = dayjs(time).tz(timezone?.value);
const format = formatMap[fmt as keyof typeof formatMap];
if (!format) {
console.warn(`Missing datetime format for ${fmt}`);
return dayjsTime.format('YYYY-MM-DD HH:mm:ss'); // fallback format
}
return dayjsTime.format(format);
},
});
}, [timezone]);
const buildChart = useCallback(() => {
if (lineChartRef.current !== undefined) {
lineChartRef.current.destroy();
@@ -132,6 +164,7 @@ const Graph = forwardRef<ToggleGraphProps | undefined, GraphProps>(
isStacked,
onClickHandler,
data,
timezone,
);
const chartHasData = hasData(data);
@@ -166,6 +199,7 @@ const Graph = forwardRef<ToggleGraphProps | undefined, GraphProps>(
isStacked,
onClickHandler,
data,
timezone,
name,
type,
]);
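
_adapters._date.override is chart.js's documented hook for swapping the date adapter; a condensed sketch of the pattern used here, with formatMap trimmed to one entry and the zone hard-coded for brevity:

import { _adapters } from 'chart.js';
import dayjs from 'dayjs';
import timezonePlugin from 'dayjs/plugin/timezone';
import utc from 'dayjs/plugin/utc';

dayjs.extend(utc);
dayjs.extend(timezonePlugin);

const formatMap: Record<string, string> = { 'HH:mm': 'HH:mm' };

// Route every tick-label format call through dayjs so axis labels follow
// the selected timezone instead of the browser's.
_adapters._date.override({
	format(time: number | Date, fmt: string): string {
		const d = dayjs(time).tz('UTC'); // the app passes timezone.value here
		return d.format(formatMap[fmt] ?? 'YYYY-MM-DD HH:mm:ss'); // fallback format
	},
});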

View File

@@ -1,5 +1,6 @@
import { Chart, ChartConfiguration, ChartData, Color } from 'chart.js';
import * as chartjsAdapter from 'chartjs-adapter-date-fns';
import { Timezone } from 'components/CustomTimePicker/timezoneUtils';
import dayjs from 'dayjs';
import { MutableRefObject } from 'react';
@@ -50,6 +51,7 @@ export const getGraphOptions = (
isStacked: boolean | undefined,
onClickHandler: GraphOnClickHandler | undefined,
data: ChartData,
timezone: Timezone,
// eslint-disable-next-line sonarjs/cognitive-complexity
): CustomChartOptions => ({
animation: {
@@ -97,7 +99,7 @@ export const getGraphOptions = (
callbacks: {
title(context): string | string[] {
const date = dayjs(context[0].parsed.x);
return date.format('MMM DD, YYYY, HH:mm:ss');
return date.tz(timezone?.value).format('MMM DD, YYYY, HH:mm:ss');
},
label(context): string | string[] {
let label = context.dataset.label || '';

View File

@@ -8,13 +8,13 @@ import LogDetail from 'components/LogDetail';
import { VIEW_TYPES } from 'components/LogDetail/constants';
import { unescapeString } from 'container/LogDetailedView/utils';
import { FontSize } from 'container/OptionsMenu/types';
import dayjs from 'dayjs';
import dompurify from 'dompurify';
import { useActiveLog } from 'hooks/logs/useActiveLog';
import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
import { useIsDarkMode } from 'hooks/useDarkMode';
// utils
import { FlatLogData } from 'lib/logs/flatLogData';
import { useTimezone } from 'providers/Timezone';
import { useCallback, useMemo, useState } from 'react';
// interfaces
import { IField } from 'types/api/logs/fields';
@@ -174,12 +174,20 @@ function ListLogView({
[selectedFields],
);
const { formatTimezoneAdjustedTimestamp } = useTimezone();
const timestampValue = useMemo(
() =>
typeof flattenLogData.timestamp === 'string'
? dayjs(flattenLogData.timestamp).format('YYYY-MM-DD HH:mm:ss.SSS')
: dayjs(flattenLogData.timestamp / 1e6).format('YYYY-MM-DD HH:mm:ss.SSS'),
[flattenLogData.timestamp],
? formatTimezoneAdjustedTimestamp(
flattenLogData.timestamp,
'YYYY-MM-DD HH:mm:ss.SSS',
)
: formatTimezoneAdjustedTimestamp(
flattenLogData.timestamp / 1e6,
'YYYY-MM-DD HH:mm:ss.SSS',
),
[flattenLogData.timestamp, formatTimezoneAdjustedTimestamp],
);
const logType = getLogIndicatorType(logData);
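
The same branch recurs in several views below; factored out, the rule is: string timestamps are parsed as-is, while numeric ones arrive in nanoseconds and are divided by 1e6 to get milliseconds (a hedged helper, not code from the PR):

type FormatFn = (input: string | number, fmt?: string) => string;

// Normalize log timestamps before timezone-aware formatting.
function renderLogTimestamp(ts: string | number, format: FormatFn): string {
	return typeof ts === 'string'
		? format(ts, 'YYYY-MM-DD HH:mm:ss.SSS')
		: format(ts / 1e6, 'YYYY-MM-DD HH:mm:ss.SSS'); // ns -> ms
}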

View File

@@ -6,7 +6,6 @@ import LogDetail from 'components/LogDetail';
import { VIEW_TYPES, VIEWS } from 'components/LogDetail/constants';
import { unescapeString } from 'container/LogDetailedView/utils';
import LogsExplorerContext from 'container/LogsExplorerContext';
import dayjs from 'dayjs';
import dompurify from 'dompurify';
import { useActiveLog } from 'hooks/logs/useActiveLog';
import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
@@ -14,6 +13,7 @@ import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { FlatLogData } from 'lib/logs/flatLogData';
import { isEmpty, isNumber, isUndefined } from 'lodash-es';
import { useTimezone } from 'providers/Timezone';
import {
KeyboardEvent,
MouseEvent,
@@ -89,16 +89,24 @@ function RawLogView({
attributesText += ' | ';
}
const { formatTimezoneAdjustedTimestamp } = useTimezone();
const text = useMemo(() => {
const date =
typeof data.timestamp === 'string'
? dayjs(data.timestamp)
: dayjs(data.timestamp / 1e6);
? formatTimezoneAdjustedTimestamp(data.timestamp, 'YYYY-MM-DD HH:mm:ss.SSS')
: formatTimezoneAdjustedTimestamp(
data.timestamp / 1e6,
'YYYY-MM-DD HH:mm:ss.SSS',
);
return `${date.format('YYYY-MM-DD HH:mm:ss.SSS')} | ${attributesText} ${
data.body
}`;
}, [data.timestamp, data.body, attributesText]);
return `${date} | ${attributesText} ${data.body}`;
}, [
data.timestamp,
data.body,
attributesText,
formatTimezoneAdjustedTimestamp,
]);
const handleClickExpand = useCallback(() => {
if (activeContextLog || isReadOnly) return;

View File

@@ -5,10 +5,10 @@ import { Typography } from 'antd';
import { ColumnsType } from 'antd/es/table';
import cx from 'classnames';
import { unescapeString } from 'container/LogDetailedView/utils';
import dayjs from 'dayjs';
import dompurify from 'dompurify';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { FlatLogData } from 'lib/logs/flatLogData';
import { useTimezone } from 'providers/Timezone';
import { useMemo } from 'react';
import { FORBID_DOM_PURIFY_TAGS } from 'utils/app';
@@ -44,6 +44,8 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
logs,
]);
const { formatTimezoneAdjustedTimestamp } = useTimezone();
const columns: ColumnsType<Record<string, unknown>> = useMemo(() => {
const fieldColumns: ColumnsType<Record<string, unknown>> = fields
.filter((e) => e.name !== 'id')
@@ -81,8 +83,11 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
render: (field, item): ColumnTypeRender<Record<string, unknown>> => {
const date =
typeof field === 'string'
? dayjs(field).format('YYYY-MM-DD HH:mm:ss.SSS')
: dayjs(field / 1e6).format('YYYY-MM-DD HH:mm:ss.SSS');
? formatTimezoneAdjustedTimestamp(field, 'YYYY-MM-DD HH:mm:ss.SSS')
: formatTimezoneAdjustedTimestamp(
field / 1e6,
'YYYY-MM-DD HH:mm:ss.SSS',
);
return {
children: (
<div className="table-timestamp">
@@ -125,7 +130,15 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
},
...(appendTo === 'end' ? fieldColumns : []),
];
}, [fields, isListViewPanel, appendTo, isDarkMode, linesPerRow, fontSize]);
}, [
fields,
isListViewPanel,
appendTo,
isDarkMode,
linesPerRow,
fontSize,
formatTimezoneAdjustedTimestamp,
]);
return { columns, dataSource: flattenLogData };
};

View File

@@ -1,11 +1,13 @@
import { Typography } from 'antd';
import convertDateToAmAndPm from 'lib/convertDateToAmAndPm';
import getFormattedDate from 'lib/getFormatedDate';
import { useTimezone } from 'providers/Timezone';
function Time({ CreatedOrUpdateTime }: DateProps): JSX.Element {
const { formatTimezoneAdjustedTimestamp } = useTimezone();
const time = new Date(CreatedOrUpdateTime);
const date = getFormattedDate(time);
const timeString = `${date} ${convertDateToAmAndPm(time)}`;
const timeString = formatTimezoneAdjustedTimestamp(
time,
'MM/DD/YYYY hh:mm:ss A (UTC Z)',
);
return <Typography>{timeString}</Typography>;
}

View File

@@ -21,4 +21,5 @@ export enum LOCALSTORAGE {
THEME_ANALYTICS_V1 = 'THEME_ANALYTICS_V1',
LAST_USED_SAVED_VIEWS = 'LAST_USED_SAVED_VIEWS',
SHOW_LOGS_QUICK_FILTERS = 'SHOW_LOGS_QUICK_FILTERS',
PREFERRED_TIMEZONE = 'PREFERRED_TIMEZONE',
}

View File

@@ -0,0 +1,3 @@
export const TimezonePickerShortcuts = {
CloseTimezonePicker: 'escape',
};

View File

@@ -7,6 +7,7 @@ import useUrlQuery from 'hooks/useUrlQuery';
import history from 'lib/history';
import heatmapPlugin from 'lib/uPlotLib/plugins/heatmapPlugin';
import timelinePlugin from 'lib/uPlotLib/plugins/timelinePlugin';
import { useTimezone } from 'providers/Timezone';
import { useMemo, useRef } from 'react';
import { useDispatch } from 'react-redux';
import { UpdateTimeInterval } from 'store/actions';
@@ -48,6 +49,7 @@ function HorizontalTimelineGraph({
const urlQuery = useUrlQuery();
const dispatch = useDispatch();
const { timezone } = useTimezone();
const options: uPlot.Options = useMemo(
() => ({
@@ -116,8 +118,18 @@ function HorizontalTimelineGraph({
}),
]
: [],
tzDate: (timestamp: number): Date =>
uPlot.tzDate(new Date(timestamp * 1e3), timezone?.value),
}),
[width, isDarkMode, transformedData.length, urlQuery, dispatch],
[
width,
isDarkMode,
transformedData.length,
urlQuery,
dispatch,
timezone?.value,
],
);
return <Uplot data={transformedData} options={options} />;
}
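
uPlot ships a static tzDate helper; the pattern here converts second-resolution timestamps to milliseconds and lets uPlot materialize Dates in the chosen IANA zone. A small sketch (the 'UTC' fallback is an assumption for illustration):

import uPlot from 'uplot';

// Factory for the tzDate option used in this diff (timestamps in seconds).
const makeTzDate = (zone?: string) => (timestamp: number): Date =>
	uPlot.tzDate(new Date(timestamp * 1e3), zone ?? 'UTC');

// usage: { ...options, tzDate: makeTzDate(timezone?.value) }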

View File

@@ -6,6 +6,7 @@ import {
useGetAlertRuleDetailsTimelineTable,
useTimelineTable,
} from 'pages/AlertDetails/hooks';
import { useTimezone } from 'providers/Timezone';
import { useMemo, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
@@ -39,6 +40,8 @@ function TimelineTable(): JSX.Element {
const { t } = useTranslation('common');
const { formatTimezoneAdjustedTimestamp } = useTimezone();
if (isError || !isValidRuleId || !ruleId) {
return <div>{t('something_went_wrong')}</div>;
}
@@ -51,6 +54,7 @@ function TimelineTable(): JSX.Element {
filters,
labels: labels ?? {},
setFilters,
formatTimezoneAdjustedTimestamp,
})}
dataSource={timelineData}
pagination={paginationConfig}

View File

@@ -8,6 +8,7 @@ import ClientSideQBSearch, {
import { ConditionalAlertPopover } from 'container/AlertHistory/AlertPopover/AlertPopover';
import { transformKeyValuesToAttributeValuesMap } from 'container/QueryBuilder/filters/utils';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { TimestampInput } from 'hooks/useTimezoneFormatter/useTimezoneFormatter';
import { Search } from 'lucide-react';
import AlertLabels, {
AlertLabelsProps,
@@ -16,7 +17,6 @@ import AlertState from 'pages/AlertDetails/AlertHeader/AlertState/AlertState';
import { useMemo } from 'react';
import { AlertRuleTimelineTableResponse } from 'types/api/alerts/def';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
import { formatEpochTimestamp } from 'utils/timeUtils';
const transformLabelsToQbKeys = (
labels: AlertRuleTimelineTableResponse['labels'],
@@ -74,10 +74,15 @@ export const timelineTableColumns = ({
filters,
labels,
setFilters,
formatTimezoneAdjustedTimestamp,
}: {
filters: TagFilter;
labels: AlertLabelsProps['labels'];
setFilters: (filters: TagFilter) => void;
formatTimezoneAdjustedTimestamp: (
input: TimestampInput,
format?: string,
) => string;
}): ColumnsType<AlertRuleTimelineTableResponse> => [
{
title: 'STATE',
@@ -106,7 +111,9 @@ export const timelineTableColumns = ({
dataIndex: 'unixMilli',
width: 200,
render: (value): JSX.Element => (
<div className="alert-rule__created-at">{formatEpochTimestamp(value)}</div>
<div className="alert-rule__created-at">
{formatTimezoneAdjustedTimestamp(value, 'MMM D, YYYY ⎯ HH:mm:ss')}
</div>
),
},
{

View File

@@ -17,14 +17,15 @@ import getAll from 'api/errors/getAll';
import getErrorCounts from 'api/errors/getErrorCounts';
import { ResizeTable } from 'components/ResizeTable';
import ROUTES from 'constants/routes';
import dayjs from 'dayjs';
import { useNotifications } from 'hooks/useNotifications';
import useResourceAttribute from 'hooks/useResourceAttribute';
import { convertRawQueriesToTraceSelectedTags } from 'hooks/useResourceAttribute/utils';
import { TimestampInput } from 'hooks/useTimezoneFormatter/useTimezoneFormatter';
import useUrlQuery from 'hooks/useUrlQuery';
import createQueryParams from 'lib/createQueryParams';
import history from 'lib/history';
import { isUndefined } from 'lodash-es';
import { useTimezone } from 'providers/Timezone';
import { useCallback, useEffect, useMemo, useRef } from 'react';
import { useTranslation } from 'react-i18next';
import { useQueries } from 'react-query';
@@ -155,8 +156,16 @@ function AllErrors(): JSX.Element {
}
}, [data?.error, data?.payload, t, notifications]);
const getDateValue = (value: string): JSX.Element => (
<Typography>{dayjs(value).format('DD/MM/YYYY HH:mm:ss A')}</Typography>
const getDateValue = (
value: string,
formatTimezoneAdjustedTimestamp: (
input: TimestampInput,
format?: string,
) => string,
): JSX.Element => (
<Typography>
{formatTimezoneAdjustedTimestamp(value, 'DD/MM/YYYY hh:mm:ss A')}
</Typography>
);
const filterIcon = useCallback(() => <SearchOutlined />, []);
@@ -283,6 +292,8 @@ function AllErrors(): JSX.Element {
[filterIcon, filterDropdownWrapper],
);
const { formatTimezoneAdjustedTimestamp } = useTimezone();
const columns: ColumnsType<Exception> = [
{
title: 'Exception Type',
@@ -342,7 +353,8 @@ function AllErrors(): JSX.Element {
dataIndex: 'lastSeen',
width: 80,
key: 'lastSeen',
render: getDateValue,
render: (value): JSX.Element =>
getDateValue(value, formatTimezoneAdjustedTimestamp),
sorter: true,
defaultSortOrder: getDefaultOrder(
getUpdatedParams,
@@ -355,7 +367,8 @@ function AllErrors(): JSX.Element {
dataIndex: 'firstSeen',
width: 80,
key: 'firstSeen',
render: getDateValue,
render: (value): JSX.Element =>
getDateValue(value, formatTimezoneAdjustedTimestamp),
sorter: true,
defaultSortOrder: getDefaultOrder(
getUpdatedParams,

View File

@@ -10,6 +10,7 @@ import getAxes from 'lib/uPlotLib/utils/getAxes';
import { getUplotChartDataForAnomalyDetection } from 'lib/uPlotLib/utils/getUplotChartData';
import { getYAxisScaleForAnomalyDetection } from 'lib/uPlotLib/utils/getYAxisScale';
import { LineChart } from 'lucide-react';
import { useTimezone } from 'providers/Timezone';
import { useEffect, useRef, useState } from 'react';
import uPlot from 'uplot';
@@ -148,10 +149,12 @@ function AnomalyAlertEvaluationView({
]
: [];
const { timezone } = useTimezone();
const options = {
width: dimensions.width,
height: dimensions.height - 36,
plugins: [bandsPlugin, tooltipPlugin(isDarkMode)],
plugins: [bandsPlugin, tooltipPlugin(isDarkMode, timezone?.value)],
focus: {
alpha: 0.3,
},
@@ -256,6 +259,8 @@ function AnomalyAlertEvaluationView({
show: true,
},
axes: getAxes(isDarkMode, yAxisUnit),
tzDate: (timestamp: number): Date =>
uPlot.tzDate(new Date(timestamp * 1e3), timezone?.value),
};
const handleSearch = (searchText: string): void => {

View File

@@ -1,8 +1,10 @@
import { themeColors } from 'constants/theme';
import dayjs from 'dayjs';
import { generateColor } from 'lib/uPlotLib/utils/generateColor';
const tooltipPlugin = (
isDarkMode: boolean,
timezone: string,
): { hooks: { init: (u: any) => void } } => {
let tooltip: HTMLDivElement;
const tooltipLeftOffset = 10;
@@ -17,7 +19,7 @@ const tooltipPlugin = (
return value.toFixed(3);
}
if (value instanceof Date) {
return value.toLocaleString();
return dayjs(value).tz(timezone).format('MM/DD/YYYY, h:mm:ss A');
}
if (value == null) {
return 'N/A';

View File

@@ -6,12 +6,12 @@ import getNextPrevId from 'api/errors/getNextPrevId';
import Editor from 'components/Editor';
import { ResizeTable } from 'components/ResizeTable';
import { getNanoSeconds } from 'container/AllError/utils';
import dayjs from 'dayjs';
import { useNotifications } from 'hooks/useNotifications';
import createQueryParams from 'lib/createQueryParams';
import history from 'lib/history';
import { isUndefined } from 'lodash-es';
import { urlKey } from 'pages/ErrorDetails/utils';
import { useTimezone } from 'providers/Timezone';
import { useEffect, useMemo, useRef, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useQuery } from 'react-query';
@@ -103,8 +103,6 @@ function ErrorDetails(props: ErrorDetailsProps): JSX.Element {
}
};
const timeStamp = dayjs(errorDetail.timestamp);
const data: { key: string; value: string }[] = Object.keys(errorDetail)
.filter((e) => !keyToExclude.includes(e))
.map((key) => ({
@@ -136,6 +134,8 @@ function ErrorDetails(props: ErrorDetailsProps): JSX.Element {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [data]);
const { formatTimezoneAdjustedTimestamp } = useTimezone();
return (
<>
<Typography>{errorDetail.exceptionType}</Typography>
@@ -145,7 +145,12 @@ function ErrorDetails(props: ErrorDetailsProps): JSX.Element {
<EventContainer>
<div>
<Typography>Event {errorDetail.errorId}</Typography>
<Typography>{timeStamp.format('MMM DD YYYY hh:mm:ss A')}</Typography>
<Typography>
{formatTimezoneAdjustedTimestamp(
errorDetail.timestamp,
'DD/MM/YYYY hh:mm:ss A (UTC Z)',
)}
</Typography>
</div>
<div>
<Space align="end" direction="horizontal">

View File

@@ -8,7 +8,7 @@ import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
import ROUTES from 'constants/routes';
import useComponentPermission from 'hooks/useComponentPermission';
import useFetch from 'hooks/useFetch';
import { useCallback, useEffect, useState } from 'react';
import { useCallback, useEffect, useRef, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
@@ -83,16 +83,22 @@ function BasicInfo({
window.open(ROUTES.CHANNELS_NEW, '_blank');
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
const hasLoggedEvent = useRef(false);
useEffect(() => {
if (!channels.loading && isNewRule) {
if (!channels.loading && isNewRule && !hasLoggedEvent.current) {
logEvent('Alert: New alert creation page visited', {
dataSource: ALERTS_DATA_SOURCE_MAP[alertDef?.alertType as AlertTypes],
numberOfChannels: channels?.payload?.length,
});
hasLoggedEvent.current = true;
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [channels.payload, channels.loading]);
}, [channels.loading]);
const refetchChannels = async (): Promise<void> => {
await channels.refetch();
};
return (
<>
@@ -197,7 +203,7 @@ function BasicInfo({
{!shouldBroadCastToAllChannels && (
<Tooltip
title={
noChannels
noChannels && !addNewChannelPermission
? 'No channels. Ask an admin to create a notification channel'
: undefined
}
@@ -212,10 +218,10 @@ function BasicInfo({
]}
>
<ChannelSelect
disabled={
shouldBroadCastToAllChannels || noChannels || !!channels.loading
}
onDropdownOpen={refetchChannels}
disabled={shouldBroadCastToAllChannels}
currentValue={alertDef.preferredChannels}
handleCreateNewChannels={handleCreateNewChannels}
channels={channels}
onSelectChannels={(preferredChannels): void => {
setAlertDef({
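
The hasLoggedEvent ref above is a one-shot guard: it survives re-renders without triggering them, so the analytics event fires at most once per mount. Extracted as a hedged hook sketch:

import { useEffect, useRef } from 'react';

// Fire `log` once, the first time `ready` becomes true for this mount.
function useLogOnce(ready: boolean, log: () => void): void {
	const hasLogged = useRef(false);
	useEffect(() => {
		if (ready && !hasLogged.current) {
			log();
			hasLogged.current = true;
		}
	}, [ready, log]);
}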

View File

@@ -1,24 +1,33 @@
import { Select } from 'antd';
import { PlusOutlined } from '@ant-design/icons';
import { Select, Spin } from 'antd';
import useComponentPermission from 'hooks/useComponentPermission';
import { State } from 'hooks/useFetch';
import { useNotifications } from 'hooks/useNotifications';
import { ReactNode } from 'react';
import { useTranslation } from 'react-i18next';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { PayloadProps } from 'types/api/channels/getAll';
import AppReducer from 'types/reducer/app';
import { StyledSelect } from './styles';
import { StyledCreateChannelOption, StyledSelect } from './styles';
export interface ChannelSelectProps {
disabled?: boolean;
currentValue?: string[];
onSelectChannels: (s: string[]) => void;
onDropdownOpen: () => void;
channels: State<PayloadProps | undefined>;
handleCreateNewChannels: () => void;
}
function ChannelSelect({
disabled,
currentValue,
onSelectChannels,
onDropdownOpen,
channels,
handleCreateNewChannels,
}: ChannelSelectProps): JSX.Element | null {
// init namespace for translations
const { t } = useTranslation('alerts');
@@ -26,6 +35,10 @@ function ChannelSelect({
const { notifications } = useNotifications();
const handleChange = (value: string[]): void => {
if (value.includes('add-new-channel')) {
handleCreateNewChannels();
return;
}
onSelectChannels(value);
};
@@ -35,9 +48,27 @@ function ChannelSelect({
description: channels.errorMessage,
});
}
const { role } = useSelector<AppState, AppReducer>((state) => state.app);
const [addNewChannelPermission] = useComponentPermission(
['add_new_channel'],
role,
);
const renderOptions = (): ReactNode[] => {
const children: ReactNode[] = [];
if (!channels.loading && addNewChannelPermission) {
children.push(
<Select.Option key="add-new-channel" value="add-new-channel">
<StyledCreateChannelOption>
<PlusOutlined />
Create a new channel
</StyledCreateChannelOption>
</Select.Option>,
);
}
if (
channels.loading ||
channels.payload === undefined ||
@@ -56,6 +87,7 @@ function ChannelSelect({
return children;
};
return (
<StyledSelect
disabled={disabled}
@@ -65,6 +97,12 @@ function ChannelSelect({
placeholder={t('placeholder_channel_select')}
data-testid="alert-channel-select"
value={currentValue}
notFoundContent={channels.loading && <Spin size="small" />}
onDropdownVisibleChange={(open): void => {
if (open) {
onDropdownOpen();
}
}}
onChange={(value): void => {
handleChange(value as string[]);
}}
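
The 'add-new-channel' value is a sentinel option: selecting it triggers an action instead of becoming part of the stored selection. The handler's shape, restated standalone (hedged):

const ADD_NEW_CHANNEL = 'add-new-channel';

// Intercept the sentinel before it reaches the normal selection path.
function handleSelectionChange(
	values: string[],
	onSelectChannels: (v: string[]) => void,
	handleCreateNewChannels: () => void,
): void {
	if (values.includes(ADD_NEW_CHANNEL)) {
		handleCreateNewChannels(); // open the create-channel flow instead
		return;
	}
	onSelectChannels(values);
}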

View File

@@ -4,3 +4,10 @@ import styled from 'styled-components';
export const StyledSelect = styled(Select)`
border-radius: 4px;
`;
export const StyledCreateChannelOption = styled.div`
color: var(--bg-robin-500);
display: flex;
align-items: center;
gap: 8px;
`;

View File

@@ -25,6 +25,7 @@ import getTimeString from 'lib/getTimeString';
import history from 'lib/history';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { useTimezone } from 'providers/Timezone';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useDispatch, useSelector } from 'react-redux';
@@ -35,6 +36,7 @@ import { AlertDef } from 'types/api/alerts/def';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { EQueryType } from 'types/common/dashboard';
import { GlobalReducer } from 'types/reducer/globalTime';
import uPlot from 'uplot';
import { getGraphType } from 'utils/getGraphType';
import { getSortedSeriesData } from 'utils/getSortedSeriesData';
import { getTimeRange } from 'utils/getTimeRange';
@@ -201,6 +203,8 @@ function ChartPreview({
[dispatch, location.pathname, urlQuery],
);
const { timezone } = useTimezone();
const options = useMemo(
() =>
getUPlotChartOptions({
@@ -236,6 +240,9 @@ function ChartPreview({
softMax: null,
softMin: null,
panelType: graphType,
tzDate: (timestamp: number) =>
uPlot.tzDate(new Date(timestamp * 1e3), timezone?.value),
timezone: timezone?.value,
}),
[
yAxisUnit,
@@ -250,6 +257,7 @@ function ChartPreview({
optionName,
alertDef?.condition.targetUnit,
graphType,
timezone?.value,
],
);

View File

@@ -102,9 +102,9 @@ function RuleOptions({
<Select.Option value="4">{t('option_notequal')}</Select.Option>
</>
)}
{/* the values 5 and 6 are reserved for above or equal and below or equal */}
{ruleType === 'anomaly_rule' && (
<Select.Option value="5">{t('option_above_below')}</Select.Option>
<Select.Option value="7">{t('option_above_below')}</Select.Option>
)}
</InlineSelect>
);

View File

@@ -4,6 +4,7 @@ import { Popover, Typography } from 'antd';
import { convertTimeToRelevantUnit } from 'container/TraceDetail/utils';
import dayjs from 'dayjs';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useTimezone } from 'providers/Timezone';
import { useEffect } from 'react';
import { toFixed } from 'utils/toFixed';
@@ -32,13 +33,17 @@ function Span(props: SpanLengthProps): JSX.Element {
const isDarkMode = useIsDarkMode();
const { time, timeUnitName } = convertTimeToRelevantUnit(inMsCount);
const { timezone } = useTimezone();
useEffect(() => {
document.documentElement.scrollTop = document.documentElement.clientHeight;
document.documentElement.scrollLeft = document.documentElement.clientWidth;
}, []);
const getContent = (): JSX.Element => {
const timeStamp = dayjs(startTime).format('h:mm:ss:SSS A');
const timeStamp = dayjs(startTime)
.tz(timezone.value)
.format('h:mm:ss:SSS A (UTC Z)');
const startTimeInMs = startTime - globalStart;
return (
<div>

View File

@@ -18,8 +18,13 @@
font-style: normal;
font-weight: var(--font-weight-normal);
line-height: 28px;
/* 155.556% */
letter-spacing: -0.09px;
width: 72%; // arbitrary number to match input width
display: flex;
align-items: center;
gap: 8px;
justify-content: space-between;
}
.subtitle {
@@ -356,6 +361,8 @@
flex: 1;
.heading {
margin-bottom: 8px;
.title {
font-size: 12px;
}
@@ -370,6 +377,18 @@
.ant-input-number {
width: 80%;
}
.no-limit {
display: flex;
align-items: center;
gap: 8px;
margin-bottom: 24px;
font-weight: 700;
font-size: 12px;
color: var(--bg-forest-400);
}
}
.signal-limit-view-mode {

View File

@@ -12,6 +12,7 @@ import {
Modal,
Row,
Select,
Switch,
Table,
TablePaginationConfig,
TableProps as AntDTableProps,
@@ -30,11 +31,11 @@ import { AxiosError } from 'axios';
import { getYAxisFormattedValue } from 'components/Graph/yAxisConfig';
import Tags from 'components/Tags/Tags';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import dayjs, { Dayjs } from 'dayjs';
import dayjs from 'dayjs';
import { useGetAllIngestionsKeys } from 'hooks/IngestionKeys/useGetAllIngestionKeys';
import useDebouncedFn from 'hooks/useDebouncedFunction';
import { useNotifications } from 'hooks/useNotifications';
import { isNil } from 'lodash-es';
import { isNil, isUndefined } from 'lodash-es';
import {
ArrowUpRight,
CalendarClock,
@@ -50,6 +51,7 @@ import {
Trash2,
X,
} from 'lucide-react';
import { useTimezone } from 'providers/Timezone';
import { ChangeEvent, useEffect, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useMutation } from 'react-query';
@@ -69,7 +71,10 @@ const { Option } = Select;
const BYTES = 1073741824;
export const disabledDate = (current: Dayjs): boolean =>
// Using any type here because antd's DatePicker expects its own internal Dayjs type
// which conflicts with our project's Dayjs type that has additional plugins (tz, utc etc).
// eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/explicit-module-boundary-types
export const disabledDate = (current: any): boolean =>
// Disable all dates before today
current && current < dayjs().endOf('day');
@@ -392,86 +397,11 @@ function MultiIngestionSettings(): JSX.Element {
const gbToBytes = (gb: number): number => Math.round(gb * 1024 ** 3);
const getFormattedTime = (date: string): string =>
dayjs(date).format('MMM DD,YYYY, hh:mm a');
const handleAddLimit = (
APIKey: IngestionKeyProps,
signalName: string,
): void => {
setActiveSignal({
id: signalName,
signal: signalName,
config: {},
});
const { dailyLimit, secondsLimit } = addEditLimitForm.getFieldsValue();
const payload = {
keyID: APIKey.id,
signal: signalName,
config: {
day: {
size: gbToBytes(dailyLimit),
},
second: {
size: gbToBytes(secondsLimit),
},
},
};
createLimitForIngestionKey(payload);
};
const handleUpdateLimit = (
APIKey: IngestionKeyProps,
signal: LimitProps,
): void => {
setActiveSignal(signal);
const { dailyLimit, secondsLimit } = addEditLimitForm.getFieldsValue();
const payload = {
limitID: signal.id,
signal: signal.signal,
config: {
day: {
size: gbToBytes(dailyLimit),
},
second: {
size: gbToBytes(secondsLimit),
},
},
};
updateLimitForIngestionKey(payload);
};
const bytesToGb = (size: number | undefined): number => {
if (!size) {
return 0;
}
return size / BYTES;
};
const enableEditLimitMode = (
APIKey: IngestionKeyProps,
signal: LimitProps,
): void => {
setActiveAPIKey(APIKey);
setActiveSignal(signal);
addEditLimitForm.setFieldsValue({
dailyLimit: bytesToGb(signal?.config?.day?.size || 0),
secondsLimit: bytesToGb(signal?.config?.second?.size || 0),
});
setIsEditAddLimitOpen(true);
};
const onDeleteLimitHandler = (): void => {
if (activeSignal && activeSignal?.id) {
deleteLimitForKey(activeSignal.id);
}
};
const getFormattedTime = (
date: string,
formatTimezoneAdjustedTimestamp: (date: string, format: string) => string,
): string =>
formatTimezoneAdjustedTimestamp(date, 'MMM DD,YYYY, hh:mm a (UTC Z)');
const showDeleteLimitModal = (
APIKey: IngestionKeyProps,
@@ -496,17 +426,152 @@ function MultiIngestionSettings(): JSX.Element {
addEditLimitForm.resetFields();
};
const handleAddLimit = (
APIKey: IngestionKeyProps,
signalName: string,
): void => {
const { dailyLimit, secondsLimit } = addEditLimitForm.getFieldsValue();
const payload = {
keyID: APIKey.id,
signal: signalName,
config: {},
};
if (!isUndefined(dailyLimit)) {
payload.config = {
day: {
size: gbToBytes(dailyLimit),
},
};
}
if (!isUndefined(secondsLimit)) {
payload.config = {
...payload.config,
second: {
size: gbToBytes(secondsLimit),
},
};
}
if (isUndefined(dailyLimit) && isUndefined(secondsLimit)) {
// Nothing to save when no limit is provided; close the edit view and reset the active signal and API key
setActiveSignal(null);
setActiveAPIKey(null);
setIsEditAddLimitOpen(false);
setUpdatedTags([]);
hideAddViewModal();
setHasCreateLimitForIngestionKeyError(false);
return;
}
createLimitForIngestionKey(payload);
};
const handleUpdateLimit = (
APIKey: IngestionKeyProps,
signal: LimitProps,
): void => {
const { dailyLimit, secondsLimit } = addEditLimitForm.getFieldsValue();
const payload = {
limitID: signal.id,
signal: signal.signal,
config: {},
};
if (isUndefined(dailyLimit) && isUndefined(secondsLimit)) {
showDeleteLimitModal(APIKey, signal);
return;
}
if (!isUndefined(dailyLimit)) {
payload.config = {
day: {
size: gbToBytes(dailyLimit),
},
};
}
if (!isUndefined(secondsLimit)) {
payload.config = {
...payload.config,
second: {
size: gbToBytes(secondsLimit),
},
};
}
updateLimitForIngestionKey(payload);
};
const bytesToGb = (size: number | undefined): number => {
if (!size) {
return 0;
}
return size / BYTES;
};
const enableEditLimitMode = (
APIKey: IngestionKeyProps,
signal: LimitProps,
): void => {
setActiveAPIKey(APIKey);
setActiveSignal({
...signal,
config: {
...signal.config,
day: {
...signal.config?.day,
enabled: !isNil(signal?.config?.day?.size),
},
second: {
...signal.config?.second,
enabled: !isNil(signal?.config?.second?.size),
},
},
});
addEditLimitForm.setFieldsValue({
dailyLimit: bytesToGb(signal?.config?.day?.size || 0),
secondsLimit: bytesToGb(signal?.config?.second?.size || 0),
enableDailyLimit: !isNil(signal?.config?.day?.size),
enableSecondLimit: !isNil(signal?.config?.second?.size),
});
setIsEditAddLimitOpen(true);
};
const onDeleteLimitHandler = (): void => {
if (activeSignal && activeSignal?.id) {
deleteLimitForKey(activeSignal.id);
}
};
const { formatTimezoneAdjustedTimestamp } = useTimezone();
const columns: AntDTableProps<IngestionKeyProps>['columns'] = [
{
title: 'Ingestion Key',
key: 'ingestion-key',
// eslint-disable-next-line sonarjs/cognitive-complexity
render: (APIKey: IngestionKeyProps): JSX.Element => {
const createdOn = getFormattedTime(APIKey.created_at);
const createdOn = getFormattedTime(
APIKey.created_at,
formatTimezoneAdjustedTimestamp,
);
const formattedDateAndTime =
APIKey && APIKey?.expires_at && getFormattedTime(APIKey?.expires_at);
APIKey &&
APIKey?.expires_at &&
getFormattedTime(APIKey?.expires_at, formatTimezoneAdjustedTimestamp);
const updatedOn = getFormattedTime(APIKey?.updated_at);
const updatedOn = getFormattedTime(
APIKey?.updated_at,
formatTimezoneAdjustedTimestamp,
);
const limits: { [key: string]: LimitProps } = {};
@@ -684,50 +749,108 @@ function MultiIngestionSettings(): JSX.Element {
<div className="signal-limit-edit-mode">
<div className="daily-limit">
<div className="heading">
<div className="title"> Daily limit </div>
<div className="title">
Daily limit
<div className="limit-enable-disable-toggle">
<Form.Item name="enableDailyLimit">
<Switch
size="small"
checked={activeSignal?.config?.day?.enabled}
onChange={(value): void => {
setActiveSignal({
...activeSignal,
config: {
...activeSignal.config,
day: {
...activeSignal.config?.day,
enabled: value,
},
},
});
}}
/>
</Form.Item>
</div>
</div>
<div className="subtitle">
Add a limit for data ingested daily{' '}
Add a limit for data ingested daily
</div>
</div>
<div className="size">
<Form.Item name="dailyLimit">
<InputNumber
addonAfter={
<Select defaultValue="GiB" disabled>
<Option value="TiB"> TiB</Option>
<Option value="GiB"> GiB</Option>
<Option value="MiB"> MiB </Option>
<Option value="KiB"> KiB </Option>
</Select>
}
/>
</Form.Item>
{activeSignal?.config?.day?.enabled ? (
<Form.Item name="dailyLimit" key="dailyLimit">
<InputNumber
disabled={!activeSignal?.config?.day?.enabled}
key="dailyLimit"
addonAfter={
<Select defaultValue="GiB" disabled>
<Option value="TiB"> TiB</Option>
<Option value="GiB"> GiB</Option>
<Option value="MiB"> MiB </Option>
<Option value="KiB"> KiB </Option>
</Select>
}
/>
</Form.Item>
) : (
<div className="no-limit">
<Infinity size={16} /> NO LIMIT
</div>
)}
</div>
</div>
<div className="second-limit">
<div className="heading">
<div className="title"> Per Second limit </div>
<div className="title">
Per Second limit{' '}
<div className="limit-enable-disable-toggle">
<Form.Item name="enableSecondLimit">
<Switch
size="small"
checked={activeSignal?.config?.second?.enabled}
onChange={(value): void => {
setActiveSignal({
...activeSignal,
config: {
...activeSignal.config,
second: {
...activeSignal.config?.second,
enabled: value,
},
},
});
}}
/>
</Form.Item>
</div>
</div>
<div className="subtitle">
{' '}
Add a limit for data ingested every second{' '}
Add a limit for data ingested every second
</div>
</div>
<div className="size">
<Form.Item name="secondsLimit">
<InputNumber
addonAfter={
<Select defaultValue="GiB" disabled>
<Option value="TiB"> TiB</Option>
<Option value="GiB"> GiB</Option>
<Option value="MiB"> MiB </Option>
<Option value="KiB"> KiB </Option>
</Select>
}
/>
</Form.Item>
{activeSignal?.config?.second?.enabled ? (
<Form.Item name="secondsLimit" key="secondsLimit">
<InputNumber
key="secondsLimit"
disabled={!activeSignal?.config?.second?.enabled}
addonAfter={
<Select defaultValue="GiB" disabled>
<Option value="TiB"> TiB</Option>
<Option value="GiB"> GiB</Option>
<Option value="MiB"> MiB </Option>
<Option value="KiB"> KiB </Option>
</Select>
}
/>
</Form.Item>
) : (
<div className="no-limit">
<Infinity size={16} /> NO LIMIT
</div>
)}
</div>
</div>
</div>
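The reworked handlers above attach a `day` or `second` block only when the form actually provides a value, so a disabled toggle translates to an absent key rather than a zero limit. A condensed sketch of that payload shaping:

```typescript
import { isUndefined } from 'lodash-es';

interface LimitConfig {
  day?: { size: number };
  second?: { size: number };
}

const gbToBytes = (gb: number): number => Math.round(gb * 1024 ** 3);

// Mirrors handleAddLimit/handleUpdateLimit above: each block is included only
// when its form field holds a value, so "no limit" never reaches the API.
function buildLimitConfig(dailyLimit?: number, secondsLimit?: number): LimitConfig {
  const config: LimitConfig = {};
  if (!isUndefined(dailyLimit)) {
    config.day = { size: gbToBytes(dailyLimit) };
  }
  if (!isUndefined(secondsLimit)) {
    config.second = { size: gbToBytes(secondsLimit) };
  }
  return config;
}

buildLimitConfig(2, undefined); // => { day: { size: 2147483648 } }
```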

View File

@@ -1,8 +1,20 @@
import { Typography } from 'antd';
import { ColumnsType } from 'antd/lib/table';
import { ResizeTable } from 'components/ResizeTable';
import { useTimezone } from 'providers/Timezone';
import { useTranslation } from 'react-i18next';
import { License } from 'types/api/licenses/def';
function ValidityColumn({ value }: { value: string }): JSX.Element {
const { formatTimezoneAdjustedTimestamp } = useTimezone();
return (
<Typography>
{formatTimezoneAdjustedTimestamp(value, 'YYYY-MM-DD HH:mm:ss (UTC Z)')}
</Typography>
);
}
function ListLicenses({ licenses }: ListLicensesProps): JSX.Element {
const { t } = useTranslation(['licenses']);
@@ -23,12 +35,14 @@ function ListLicenses({ licenses }: ListLicensesProps): JSX.Element {
title: t('column_valid_from'),
dataIndex: 'ValidFrom',
key: 'valid from',
render: (value: string): JSX.Element => ValidityColumn({ value }),
width: 80,
},
{
title: t('column_valid_until'),
dataIndex: 'ValidUntil',
key: 'valid until',
render: (value: string): JSX.Element => ValidityColumn({ value }),
width: 80,
},
];

View File

@@ -57,6 +57,7 @@ import {
// see more: https://github.com/lucide-icons/lucide/issues/94
import { handleContactSupport } from 'pages/Integrations/utils';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { useTimezone } from 'providers/Timezone';
import {
ChangeEvent,
Key,
@@ -343,31 +344,13 @@ function DashboardsList(): JSX.Element {
}
}, [state.error, state.value, t, notifications]);
const { formatTimezoneAdjustedTimestamp } = useTimezone();
function getFormattedTime(dashboard: Dashboard, option: string): string {
const timeOptions: Intl.DateTimeFormatOptions = {
hour: '2-digit',
minute: '2-digit',
second: '2-digit',
hour12: false,
};
const formattedTime = new Date(get(dashboard, option, '')).toLocaleTimeString(
'en-US',
timeOptions,
return formatTimezoneAdjustedTimestamp(
get(dashboard, option, ''),
'MMM D, YYYY ⎯ HH:mm:ss',
);
const dateOptions: Intl.DateTimeFormatOptions = {
month: 'short',
day: 'numeric',
year: 'numeric',
};
const formattedDate = new Date(get(dashboard, option, '')).toLocaleDateString(
'en-US',
dateOptions,
);
// Combine time and date
return `${formattedDate}${formattedTime}`;
}
const onLastUpdated = (time: string): string => {
@@ -410,31 +393,11 @@ function DashboardsList(): JSX.Element {
title: 'Dashboards',
key: 'dashboard',
render: (dashboard: Data, _, index): JSX.Element => {
const timeOptions: Intl.DateTimeFormatOptions = {
hour: '2-digit',
minute: '2-digit',
second: '2-digit',
hour12: false,
};
const formattedTime = new Date(dashboard.createdAt).toLocaleTimeString(
'en-US',
timeOptions,
const formattedDateAndTime = formatTimezoneAdjustedTimestamp(
dashboard.createdAt,
'MMM D, YYYY ⎯ HH:mm:ss',
);
const dateOptions: Intl.DateTimeFormatOptions = {
month: 'short',
day: 'numeric',
year: 'numeric',
};
const formattedDate = new Date(dashboard.createdAt).toLocaleDateString(
'en-US',
dateOptions,
);
// Combine time and date
const formattedDateAndTime = `${formattedDate}${formattedTime}`;
const getLink = (): string => `${ROUTES.ALL_DASHBOARD}/${dashboard.id}`;
const onClickHandler = (event: React.MouseEvent<HTMLElement>): void => {

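The refactor above collapses a hand-stitched pair of `Intl` calls into one timezone-aware format string. A before/after sketch, assuming `formatTimezoneAdjustedTimestamp` is the formatter returned by `useTimezone()`:

```typescript
// Before: two locale calls whose outputs were concatenated by hand,
// always in the browser's timezone.
const legacyFormat = (iso: string): string => {
  const time = new Date(iso).toLocaleTimeString('en-US', {
    hour: '2-digit',
    minute: '2-digit',
    second: '2-digit',
    hour12: false,
  });
  const date = new Date(iso).toLocaleDateString('en-US', {
    month: 'short',
    day: 'numeric',
    year: 'numeric',
  });
  return `${date}${time}`;
};

// After: one call against the user's preferred zone.
declare const formatTimezoneAdjustedTimestamp: (input: string, format?: string) => string;
const format = (iso: string): string =>
  formatTimezoneAdjustedTimestamp(iso, 'MMM D, YYYY ⎯ HH:mm:ss');
```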
View File

@@ -82,9 +82,8 @@ function ImportJSON({
const dashboardData = JSON.parse(editorValue) as DashboardData;
// Add validation for uuid
if (dashboardData.uuid !== undefined && dashboardData.uuid.trim() === '') {
// silently remove uuid if it is empty
// Remove uuid from the dashboard data in all cases: empty, duplicate, or any valid non-duplicate uuid
if (dashboardData.uuid !== undefined) {
delete dashboardData.uuid;
}

View File

@@ -8,10 +8,12 @@ import { useResizeObserver } from 'hooks/useDimensions';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { useTimezone } from 'providers/Timezone';
import { useMemo, useRef } from 'react';
import { useQueries, UseQueryResult } from 'react-query';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import uPlot from 'uplot';
import {
getHostQueryPayload,
@@ -73,6 +75,8 @@ function NodeMetrics({
[queries],
);
const { timezone } = useTimezone();
const options = useMemo(
() =>
queries.map(({ data }, idx) =>
@@ -86,6 +90,9 @@ function NodeMetrics({
minTimeScale: start,
maxTimeScale: end,
verticalLineTimestamp,
tzDate: (timestamp: number) =>
uPlot.tzDate(new Date(timestamp * 1e3), timezone?.value),
timezone: timezone?.value,
}),
),
[
@@ -96,6 +103,7 @@ function NodeMetrics({
start,
verticalLineTimestamp,
end,
timezone?.value,
],
);

View File

@@ -8,10 +8,12 @@ import { useResizeObserver } from 'hooks/useDimensions';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { useTimezone } from 'providers/Timezone';
import { useMemo, useRef } from 'react';
import { useQueries, UseQueryResult } from 'react-query';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import uPlot from 'uplot';
import { getPodQueryPayload, podWidgetInfo } from './constants';
@@ -60,6 +62,7 @@ function PodMetrics({
() => queries.map(({ data }) => getUPlotChartData(data?.payload)),
[queries],
);
const { timezone } = useTimezone();
const options = useMemo(
() =>
@@ -74,9 +77,20 @@ function PodMetrics({
minTimeScale: start,
maxTimeScale: end,
verticalLineTimestamp,
tzDate: (timestamp: number) =>
uPlot.tzDate(new Date(timestamp * 1e3), timezone?.value),
timezone: timezone?.value,
}),
),
[queries, isDarkMode, dimensions, start, verticalLineTimestamp, end],
[
queries,
isDarkMode,
dimensions,
start,
end,
verticalLineTimestamp,
timezone?.value,
],
);
const renderCardContent = (

View File

@@ -11,7 +11,8 @@ import ROUTES from 'constants/routes';
import dompurify from 'dompurify';
import { isEmpty } from 'lodash-es';
import { ArrowDownToDot, ArrowUpFromDot, Ellipsis } from 'lucide-react';
import { useMemo, useState } from 'react';
import { useTimezone } from 'providers/Timezone';
import React, { useMemo, useState } from 'react';
import { useLocation } from 'react-router-dom';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { FORBID_DOM_PURIFY_TAGS } from 'utils/app';
@@ -68,6 +69,8 @@ export function TableViewActions(
const [isOpen, setIsOpen] = useState<boolean>(false);
const { formatTimezoneAdjustedTimestamp } = useTimezone();
if (record.field === 'body') {
const parsedBody = recursiveParseJSON(fieldData.value);
if (!isEmpty(parsedBody)) {
@@ -100,33 +103,44 @@ export function TableViewActions(
);
}
let cleanTimestamp: string;
if (record.field === 'timestamp') {
cleanTimestamp = fieldData.value.replace(/^["']|["']$/g, '');
}
const renderFieldContent = (): JSX.Element => {
const commonStyles: React.CSSProperties = {
color: Color.BG_SIENNA_400,
whiteSpace: 'pre-wrap',
tabSize: 4,
};
switch (record.field) {
case 'body':
return <span style={commonStyles} dangerouslySetInnerHTML={bodyHtml} />;
case 'timestamp':
return (
<span style={commonStyles}>
{formatTimezoneAdjustedTimestamp(
cleanTimestamp,
'MM/DD/YYYY, HH:mm:ss.SSS (UTC Z)',
)}
</span>
);
default:
return (
<span style={commonStyles}>{removeEscapeCharacters(fieldData.value)}</span>
);
}
};
return (
<div className={cx('value-field', isOpen ? 'open-popover' : '')}>
{record.field === 'body' ? (
<CopyClipboardHOC entityKey={fieldFilterKey} textToCopy={textToCopy}>
<span
style={{
color: Color.BG_SIENNA_400,
whiteSpace: 'pre-wrap',
tabSize: 4,
}}
dangerouslySetInnerHTML={bodyHtml}
/>
</CopyClipboardHOC>
) : (
<CopyClipboardHOC entityKey={fieldFilterKey} textToCopy={textToCopy}>
<span
style={{
color: Color.BG_SIENNA_400,
whiteSpace: 'pre-wrap',
tabSize: 4,
}}
>
{removeEscapeCharacters(fieldData.value)}
</span>
</CopyClipboardHOC>
)}
<CopyClipboardHOC entityKey={fieldFilterKey} textToCopy={textToCopy}>
{renderFieldContent()}
</CopyClipboardHOC>
{!isListViewPanel && (
<span className="action-btn">
<Tooltip title="Filter for value">

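One subtlety above: log timestamp values can arrive wrapped in quotes, so they are stripped before being handed to the formatter. A tiny sketch of that cleanup:

```typescript
// Log field values may arrive as '"2024-03-14T19:30:00Z"' (quotes included);
// the regex removes one leading and one trailing quote, single or double.
const stripQuotes = (raw: string): string => raw.replace(/^["']|["']$/g, '');

stripQuotes('"2024-03-14T19:30:00Z"'); // => 2024-03-14T19:30:00Z
```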
View File

@@ -15,6 +15,7 @@ import { useLogsData } from 'hooks/useLogsData';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { FlatLogData } from 'lib/logs/flatLogData';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { useTimezone } from 'providers/Timezone';
import {
Dispatch,
HTMLAttributes,
@@ -76,7 +77,12 @@ function LogsPanelComponent({
});
};
const columns = getLogPanelColumnsList(widget.selectedLogFields);
const { formatTimezoneAdjustedTimestamp } = useTimezone();
const columns = getLogPanelColumnsList(
widget.selectedLogFields,
formatTimezoneAdjustedTimestamp,
);
const dataLength =
queryResponse.data?.payload?.data?.newResult?.data?.result[0]?.list?.length;

View File

@@ -1,6 +1,7 @@
import { ColumnsType } from 'antd/es/table';
import { Typography } from 'antd/lib';
import { OPERATORS } from 'constants/queryBuilder';
import { TimestampInput } from 'hooks/useTimezoneFormatter/useTimezoneFormatter';
// import Typography from 'antd/es/typography/Typography';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { ReactNode } from 'react';
@@ -13,18 +14,31 @@ import { v4 as uuid } from 'uuid';
export const getLogPanelColumnsList = (
selectedLogFields: Widgets['selectedLogFields'],
formatTimezoneAdjustedTimestamp: (
input: TimestampInput,
format?: string,
) => string,
): ColumnsType<RowData> => {
const initialColumns: ColumnsType<RowData> = [];
const columns: ColumnsType<RowData> =
selectedLogFields?.map((field: IField) => {
const { name } = field;
return {
title: name,
dataIndex: name,
key: name,
width: name === 'body' ? 350 : 100,
render: (value: ReactNode): JSX.Element => {
if (name === 'timestamp') {
return (
<Typography.Text>
{formatTimezoneAdjustedTimestamp(value as string)}
</Typography.Text>
);
}
if (name === 'body') {
return (
<Typography.Paragraph ellipsis={{ rows: 1 }} data-testid={name}>

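Because `getLogPanelColumnsList` is a plain function rather than a component, it cannot call `useTimezone()` itself; the hook-derived formatter is injected as an argument instead. A minimal sketch of that shape, with RowData simplified to a plain record:

```typescript
import { ColumnsType } from 'antd/es/table';

type Formatter = (input: string | number | Date, format?: string) => string;

// Column factories run outside React's render cycle, so the timezone-aware
// formatter is passed in rather than obtained from a hook.
function buildTimestampColumn(
  formatTimestamp: Formatter,
): ColumnsType<Record<string, unknown>> {
  return [
    {
      title: 'timestamp',
      dataIndex: 'timestamp',
      key: 'timestamp',
      render: (value: unknown): string => formatTimestamp(value as string),
    },
  ];
}
```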
View File

@@ -0,0 +1,81 @@
.timezone-adaption {
padding: 16px;
background: var(--bg-ink-400);
border: 1px solid var(--bg-ink-500);
border-radius: 4px;
&__header {
display: flex;
justify-content: space-between;
align-items: center;
margin-bottom: 8px;
}
&__title {
color: var(--bg-vanilla-300);
font-size: 14px;
font-weight: 500;
margin: 0;
}
&__description {
color: var(--bg-vanilla-400);
font-size: 14px;
line-height: 20px;
margin: 0 0 12px 0;
}
&__note {
display: flex;
align-items: center;
justify-content: space-between;
padding: 7.5px 12px;
background: rgba(78, 116, 248, 0.1);
border: 1px solid rgba(78, 116, 248, 0.1);
border-radius: 4px;
}
&__bullet {
color: var(--bg-robin-400);
font-size: 16px;
line-height: 20px;
}
&__note-text-container {
display: flex;
align-items: center;
gap: 10px;
}
&__note-text {
display: flex;
align-items: center;
gap: 4px;
color: var(--bg-robin-400);
font-size: 14px;
line-height: 20px;
}
&__note-text-overridden {
display: flex;
align-items: center;
padding: 0 2px;
background: rgba(171, 189, 255, 0.04);
border-radius: 2px;
font-size: 12px;
line-height: 16px;
color: var(--bg-vanilla-100);
}
&__clear-override {
display: flex;
align-items: center;
gap: 6px;
background: transparent;
border: none;
padding: 0;
color: var(--bg-robin-300);
font-size: 12px;
line-height: 16px; /* 133.333% */
letter-spacing: 0.12px;
cursor: pointer;
}
}

View File

@@ -0,0 +1,78 @@
import './TimezoneAdaptation.styles.scss';
import { Color } from '@signozhq/design-tokens';
import { Switch } from 'antd';
import { Delete } from 'lucide-react';
import { useTimezone } from 'providers/Timezone';
import { useMemo, useState } from 'react';
function TimezoneAdaptation(): JSX.Element {
const { timezone, browserTimezone, updateTimezone } = useTimezone();
const isTimezoneOverridden = useMemo(
() => timezone?.offset !== browserTimezone.offset,
[timezone, browserTimezone],
);
const [isAdaptationEnabled, setIsAdaptationEnabled] = useState(true);
const getSwitchStyles = (): React.CSSProperties => ({
backgroundColor:
isAdaptationEnabled && isTimezoneOverridden ? Color.BG_AMBER_400 : undefined,
});
const handleOverrideClear = (): void => {
updateTimezone(browserTimezone);
};
return (
<div className="timezone-adaption">
<div className="timezone-adaption__header">
<h2 className="timezone-adaption__title">Adapt to my timezone</h2>
<Switch
checked={isAdaptationEnabled}
onChange={setIsAdaptationEnabled}
style={getSwitchStyles()}
/>
</div>
<p className="timezone-adaption__description">
Adapt the timestamps shown in the SigNoz console to my active timezone.
</p>
<div className="timezone-adaption__note">
<div className="timezone-adaption__note-text-container">
<span className="timezone-adaption__bullet"></span>
<span className="timezone-adaption__note-text">
{isTimezoneOverridden ? (
<>
Your current timezone is overridden to
<span className="timezone-adaption__note-text-overridden">
{timezone?.offset}
</span>
</>
) : (
<>
You can override the timezone adaptation for any view with the time
picker.
</>
)}
</span>
</div>
{!!isTimezoneOverridden && (
<button
type="button"
className="timezone-adaption__clear-override"
onClick={handleOverrideClear}
>
<Delete height={12} width={12} color={Color.BG_ROBIN_300} />
Clear override
</button>
)}
</div>
</div>
);
}
export default TimezoneAdaptation;
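The override check above compares UTC offsets rather than zone names, so two zones with the same offset count as equivalent. Extracted as a sketch:

```typescript
import { useTimezone } from 'providers/Timezone';

// Mirrors the isTimezoneOverridden memo above: the preference counts as an
// override whenever its UTC offset differs from the browser zone's offset.
function useIsTimezoneOverridden(): boolean {
  const { timezone, browserTimezone } = useTimezone();
  return timezone?.offset !== browserTimezone.offset;
}
```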

View File

@@ -7,6 +7,7 @@ import { LogOut, Moon, Sun } from 'lucide-react';
import { useState } from 'react';
import Password from './Password';
import TimezoneAdaptation from './TimezoneAdaptation/TimezoneAdaptation';
import UserInfo from './UserInfo';
function MySettings(): JSX.Element {
@@ -78,6 +79,8 @@ function MySettings(): JSX.Element {
<Password />
</div>
<TimezoneAdaptation />
<Button
className="flexBtn"
onClick={(): void => Logout()}

View File

@@ -14,7 +14,9 @@ import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { cloneDeep, isEqual, isUndefined } from 'lodash-es';
import _noop from 'lodash-es/noop';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { useTimezone } from 'providers/Timezone';
import { useEffect, useMemo, useRef, useState } from 'react';
import uPlot from 'uplot';
import { getSortedSeriesData } from 'utils/getSortedSeriesData';
import { getTimeRange } from 'utils/getTimeRange';
@@ -105,6 +107,8 @@ function UplotPanelWrapper({
}
}, [graphVisibility, hiddenGraph, widget.panelTypes, widget?.stackedBarChart]);
const { timezone } = useTimezone();
const options = useMemo(
() =>
getUPlotChartOptions({
@@ -128,6 +132,9 @@ function UplotPanelWrapper({
hiddenGraph,
setHiddenGraph,
customTooltipElement,
tzDate: (timestamp: number) =>
uPlot.tzDate(new Date(timestamp * 1e3), timezone?.value),
timezone: timezone?.value,
}),
[
widget?.id,
@@ -150,6 +157,7 @@ function UplotPanelWrapper({
currentQuery,
hiddenGraph,
customTooltipElement,
timezone?.value,
],
);

View File

@@ -1,8 +1,14 @@
import dayjs from 'dayjs';
import { useTimezone } from 'providers/Timezone';
function DeploymentTime(deployTime: string): JSX.Element {
const { formatTimezoneAdjustedTimestamp } = useTimezone();
return (
<span>{dayjs(deployTime).locale('en').format('MMMM DD, YYYY hh:mm A')}</span>
<span>
{formatTimezoneAdjustedTimestamp(
deployTime,
'MMMM DD, YYYY hh:mm A (UTC Z)',
)}{' '}
</span>
);
}

View File

@@ -3,8 +3,8 @@ import './styles.scss';
import { ExpandAltOutlined } from '@ant-design/icons';
import LogDetail from 'components/LogDetail';
import { VIEW_TYPES } from 'components/LogDetail/constants';
import dayjs from 'dayjs';
import { useActiveLog } from 'hooks/logs/useActiveLog';
import { useTimezone } from 'providers/Timezone';
import { ILog } from 'types/api/logs/log';
function LogsList({ logs }: LogsListProps): JSX.Element {
@@ -18,12 +18,17 @@ function LogsList({ logs }: LogsListProps): JSX.Element {
const makeLogDetailsHandler = (log: ILog) => (): void => onSetActiveLog(log);
const { formatTimezoneAdjustedTimestamp } = useTimezone();
return (
<div className="logs-preview-list-container">
{logs.map((log) => (
<div key={log.id} className="logs-preview-list-item">
<div className="logs-preview-list-item-timestamp">
{dayjs(log.timestamp).format('MMM DD HH:mm:ss.SSS')}
{formatTimezoneAdjustedTimestamp(
log.timestamp,
'MMM DD HH:mm:ss.SSS (UTC Z)',
)}
</div>
<div className="logs-preview-list-item-body">{log.body}</div>
<div

View File

@@ -1,4 +1,4 @@
import dayjs from 'dayjs';
import { useTimezone } from 'providers/Timezone';
import React from 'react';
import { PipelineData, ProcessorData } from 'types/api/pipeline/def';
@@ -6,13 +6,18 @@ import { PipelineIndexIcon } from '../AddNewProcessor/styles';
import { ColumnDataStyle, ListDataStyle, ProcessorIndexIcon } from '../styles';
import PipelineFilterSummary from './PipelineFilterSummary';
function CreatedAtComponent({ record }: { record: Record }): JSX.Element {
const { formatTimezoneAdjustedTimestamp } = useTimezone();
return (
<ColumnDataStyle>
{formatTimezoneAdjustedTimestamp(record, 'MMMM DD, YYYY hh:mm A (UTC Z)')}
</ColumnDataStyle>
);
}
const componentMap: ComponentMap = {
orderId: ({ record }) => <PipelineIndexIcon>{record}</PipelineIndexIcon>,
createdAt: ({ record }) => (
<ColumnDataStyle>
{dayjs(record).locale('en').format('MMMM DD, YYYY hh:mm A')}
</ColumnDataStyle>
),
createdAt: ({ record }) => <CreatedAtComponent record={record} />,
id: ({ record }) => <ProcessorIndexIcon>{record}</ProcessorIndexIcon>,
name: ({ record }) => <ListDataStyle>{record}</ListDataStyle>,
filter: ({ record }) => <PipelineFilterSummary filter={record} />,

View File

@@ -17,6 +17,7 @@ import history from 'lib/history';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { isEmpty } from 'lodash-es';
import { useTimezone } from 'providers/Timezone';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { useLocation } from 'react-router-dom';
@@ -26,6 +27,7 @@ import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { DataSource } from 'types/common/queryBuilder';
import { GlobalReducer } from 'types/reducer/globalTime';
import uPlot from 'uplot';
import { getTimeRange } from 'utils/getTimeRange';
import { Container } from './styles';
@@ -118,6 +120,8 @@ function TimeSeriesView({
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
const { timezone } = useTimezone();
const chartOptions = getUPlotChartOptions({
onDragSelect,
yAxisUnit: yAxisUnit || '',
@@ -131,6 +135,9 @@ function TimeSeriesView({
maxTimeScale,
softMax: null,
softMin: null,
tzDate: (timestamp: number) =>
uPlot.tzDate(new Date(timestamp * 1e3), timezone?.value),
timezone: timezone?.value,
});
return (

View File

@@ -28,6 +28,7 @@ import getTimeString from 'lib/getTimeString';
import history from 'lib/history';
import { isObject } from 'lodash-es';
import { Check, Copy, Info, Send, Undo } from 'lucide-react';
import { useTimezone } from 'providers/Timezone';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useQueryClient } from 'react-query';
import { connect, useSelector } from 'react-redux';
@@ -613,6 +614,8 @@ function DateTimeSelection({
);
};
const { timezone } = useTimezone();
return (
<div className="date-time-selector">
{showResetButton && selectedTime !== defaultRelativeTime && (
@@ -664,8 +667,8 @@ function DateTimeSelection({
setIsValidteRelativeTime(isValid);
}}
selectedValue={getInputLabel(
dayjs(minTime / 1000000),
dayjs(maxTime / 1000000),
dayjs(minTime / 1000000).tz(timezone.value),
dayjs(maxTime / 1000000).tz(timezone.value),
selectedTime,
)}
data-testid="dropDown"

View File

@@ -24,6 +24,7 @@ import history from 'lib/history';
import { map } from 'lodash-es';
import { PanelRight } from 'lucide-react';
import { SPAN_DETAILS_LEFT_COL_WIDTH } from 'pages/TraceDetail/constants';
import { useTimezone } from 'providers/Timezone';
import { useEffect, useMemo, useState } from 'react';
import { ITraceForest, PayloadProps } from 'types/api/trace/getTraceItem';
import { getSpanTreeMetadata } from 'utils/getSpanTreeMetadata';
@@ -139,6 +140,8 @@ function TraceDetail({ response }: TraceDetailProps): JSX.Element {
const isDarkMode = useIsDarkMode();
const { timezone } = useTimezone();
return (
<StyledRow styledclass={[Flex({ flex: 1 })]}>
<StyledCol flex="auto" styledclass={styles.leftContainer}>
@@ -195,7 +198,9 @@ function TraceDetail({ response }: TraceDetailProps): JSX.Element {
{isGlobalTimeVisible && (
<styles.TimeStampContainer flex={`${SPAN_DETAILS_LEFT_COL_WIDTH}px`}>
<Typography>
{dayjs(traceMetaData.globalStart).format('hh:mm:ss a MM/DD')}
{dayjs(traceMetaData.globalStart)
.tz(timezone.value)
.format('hh:mm:ss a (UTC Z) MM/DD')}
</Typography>
</styles.TimeStampContainer>
)}

View File

@@ -15,6 +15,7 @@ import useDragColumns from 'hooks/useDragColumns';
import { getDraggedColumns } from 'hooks/useDragColumns/utils';
import useUrlQueryData from 'hooks/useUrlQueryData';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { useTimezone } from 'providers/Timezone';
import { memo, useCallback, useMemo } from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
@@ -97,10 +98,15 @@ function ListView({ isFilterApplied }: ListViewProps): JSX.Element {
queryTableDataResult,
]);
const { formatTimezoneAdjustedTimestamp } = useTimezone();
const columns = useMemo(() => {
const updatedColumns = getListColumns(options?.selectColumns || []);
const updatedColumns = getListColumns(
options?.selectColumns || [],
formatTimezoneAdjustedTimestamp,
);
return getDraggedColumns(updatedColumns, draggedColumns);
}, [options?.selectColumns, draggedColumns]);
}, [options?.selectColumns, formatTimezoneAdjustedTimestamp, draggedColumns]);
const transformedQueryTableData = useMemo(
() => transformDataWithDate(queryTableData) || [],

View File

@@ -3,7 +3,7 @@ import { ColumnsType } from 'antd/es/table';
import ROUTES from 'constants/routes';
import { getMs } from 'container/Trace/Filters/Panel/PanelBody/Duration/util';
import { formUrlParams } from 'container/TraceDetail/utils';
import dayjs from 'dayjs';
import { TimestampInput } from 'hooks/useTimezoneFormatter/useTimezoneFormatter';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { Link } from 'react-router-dom';
import { ILog } from 'types/api/logs/log';
@@ -40,6 +40,10 @@ export const getTraceLink = (record: RowData): string =>
export const getListColumns = (
selectedColumns: BaseAutocompleteData[],
formatTimezoneAdjustedTimestamp: (
input: TimestampInput,
format?: string,
) => string | number,
): ColumnsType<RowData> => {
const initialColumns: ColumnsType<RowData> = [
{
@@ -50,8 +54,8 @@ export const getListColumns = (
render: (value, item): JSX.Element => {
const date =
typeof value === 'string'
? dayjs(value).format('YYYY-MM-DD HH:mm:ss.SSS')
: dayjs(value / 1e6).format('YYYY-MM-DD HH:mm:ss.SSS');
? formatTimezoneAdjustedTimestamp(value, 'YYYY-MM-DD HH:mm:ss.SSS')
: formatTimezoneAdjustedTimestamp(value / 1e6, 'YYYY-MM-DD HH:mm:ss.SSS');
return (
<BlockLink to={getTraceLink(item)}>
<Typography.Text>{date}</Typography.Text>

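The branch above exists because trace timestamps arrive either as ISO strings or as epoch nanoseconds; dayjs and `Date` expect milliseconds, hence the division by 1e6. A sketch:

```typescript
// Trace timestamps come in two shapes: ISO strings pass through untouched,
// while epoch-nanosecond numbers are scaled down to the milliseconds dayjs expects.
// (Very large nanosecond values exceed Number's 2^53 integer range, so sub-ms
// precision can be lost; only the millisecond part matters for display here.)
const toMilliseconds = (value: string | number): string | number =>
  typeof value === 'string' ? value : value / 1e6;

toMilliseconds(1700000000000000000); // => 1700000000000 ms, i.e. 2023-11-14T22:13:20Z
```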
View File

@@ -15,6 +15,7 @@ import { Pagination } from 'hooks/queryPagination';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import history from 'lib/history';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { useTimezone } from 'providers/Timezone';
import {
Dispatch,
HTMLAttributes,
@@ -49,7 +50,12 @@ function TracesTableComponent({
}));
}, [pagination, setRequestData]);
const columns = getListColumns(widget.selectedTracesFields || []);
const { formatTimezoneAdjustedTimestamp } = useTimezone();
const columns = getListColumns(
widget.selectedTracesFields || [],
formatTimezoneAdjustedTimestamp,
);
const dataLength =
queryResponse.data?.payload?.data?.newResult?.data?.result[0]?.list?.length;

View File

@@ -1,12 +1,12 @@
import { Tag, Typography } from 'antd';
import convertDateToAmAndPm from 'lib/convertDateToAmAndPm';
import getFormattedDate from 'lib/getFormatedDate';
import { useTimezone } from 'providers/Timezone';
import { Alerts } from 'types/api/alerts/getTriggered';
import Status from '../TableComponents/AlertStatus';
import { TableCell, TableRow } from './styles';
function ExapandableRow({ allAlerts }: ExapandableRowProps): JSX.Element {
const { formatTimezoneAdjustedTimestamp } = useTimezone();
return (
<>
{allAlerts.map((alert) => {
@@ -40,8 +40,9 @@ function ExapandableRow({ allAlerts }: ExapandableRowProps): JSX.Element {
</TableCell>
<TableCell>
<Typography>{`${getFormattedDate(formatedDate)} ${convertDateToAmAndPm(
<Typography>{`${formatTimezoneAdjustedTimestamp(
formatedDate,
'MM/DD/YYYY hh:mm:ss A (UTC Z)',
)}`}</Typography>
</TableCell>

View File

@@ -4,8 +4,7 @@ import { ColumnsType } from 'antd/lib/table';
import { ResizeTable } from 'components/ResizeTable';
import LabelColumn from 'components/TableRenderer/LabelColumn';
import AlertStatus from 'container/TriggeredAlerts/TableComponents/AlertStatus';
import convertDateToAmAndPm from 'lib/convertDateToAmAndPm';
import getFormattedDate from 'lib/getFormatedDate';
import { useTimezone } from 'providers/Timezone';
import { Alerts } from 'types/api/alerts/getTriggered';
import { Value } from './Filter';
@@ -16,6 +15,7 @@ function NoFilterTable({
selectedFilter,
}: NoFilterTableProps): JSX.Element {
const filteredAlerts = FilterAlerts(allAlerts, selectedFilter);
const { formatTimezoneAdjustedTimestamp } = useTimezone();
// need to add the filter
const columns: ColumnsType<Alerts> = [
@@ -83,15 +83,12 @@ function NoFilterTable({
width: 100,
sorter: (a, b): number =>
new Date(a.startsAt).getTime() - new Date(b.startsAt).getTime(),
render: (date): JSX.Element => {
const formatedDate = new Date(date);
return (
<Typography>{`${getFormattedDate(formatedDate)} ${convertDateToAmAndPm(
formatedDate,
)}`}</Typography>
);
},
render: (date): JSX.Element => (
<Typography>{`${formatTimezoneAdjustedTimestamp(
date,
'MM/DD/YYYY hh:mm:ss A (UTC Z)',
)}`}</Typography>
),
},
];

View File

@@ -1,4 +1,4 @@
import { useEffect, useRef, useState } from 'react';
import { useCallback, useEffect, useState } from 'react';
import { ErrorResponse, SuccessResponse } from 'types/api';
function useFetch<PayloadProps, FunctionParams>(
@@ -10,7 +10,7 @@ function useFetch<PayloadProps, FunctionParams>(
(arg0: any): Promise<SuccessResponse<PayloadProps> | ErrorResponse>;
},
param?: FunctionParams,
): State<PayloadProps | undefined> {
): State<PayloadProps | undefined> & { refetch: () => Promise<void> } {
const [state, setStates] = useState<State<PayloadProps | undefined>>({
loading: true,
success: null,
@@ -19,37 +19,28 @@ function useFetch<PayloadProps, FunctionParams>(
payload: undefined,
});
const loadingRef = useRef(0);
useEffect(() => {
const fetchData = useCallback(async (): Promise<void> => {
setStates((prev) => ({ ...prev, loading: true }));
try {
(async (): Promise<void> => {
if (state.loading) {
const response = await functions(param);
const response = await functions(param);
if (loadingRef.current === 0) {
loadingRef.current = 1;
if (response.statusCode === 200) {
setStates({
loading: false,
error: false,
success: true,
payload: response.payload,
errorMessage: '',
});
} else {
setStates({
loading: false,
error: true,
success: false,
payload: undefined,
errorMessage: response.error as string,
});
}
}
}
})();
if (response.statusCode === 200) {
setStates({
loading: false,
error: false,
success: true,
payload: response.payload,
errorMessage: '',
});
} else {
setStates({
loading: false,
error: true,
success: false,
payload: undefined,
errorMessage: response.error as string,
});
}
} catch (error) {
setStates({
payload: undefined,
@@ -59,13 +50,16 @@ function useFetch<PayloadProps, FunctionParams>(
errorMessage: error as string,
});
}
return (): void => {
loadingRef.current = 1;
};
}, [functions, param, state.loading]);
}, [functions, param]);
// Initial fetch
useEffect(() => {
fetchData();
}, [fetchData]);
return {
...state,
refetch: fetchData,
};
}
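With the fetch body lifted into a `useCallback`, consumers get an imperative `refetch` alongside the state. A usage sketch; `getAllChannels` and the import path are hypothetical stand-ins for an API helper of the `(params) => Promise<SuccessResponse | ErrorResponse>` shape the hook wraps:

```typescript
import { Select } from 'antd';
import useFetch from 'hooks/useFetch'; // assumption: the hook's module path

declare function getAllChannels(): Promise<any>; // hypothetical API helper

function ChannelsDropdown(): JSX.Element {
  const { loading, refetch } = useFetch(getAllChannels);
  return (
    <Select
      loading={loading}
      // Re-run the request whenever the dropdown opens so the list stays fresh.
      onDropdownVisibleChange={(open): void => {
        if (open) {
          void refetch();
        }
      }}
    />
  );
}
```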

View File

@@ -0,0 +1,103 @@
import { Timezone } from 'components/CustomTimePicker/timezoneUtils';
import dayjs from 'dayjs';
import timezone from 'dayjs/plugin/timezone';
import utc from 'dayjs/plugin/utc';
import { useCallback, useEffect, useMemo } from 'react';
// Initialize dayjs plugins
dayjs.extend(utc);
dayjs.extend(timezone);
// Types
export type TimestampInput = string | number | Date;
interface CacheEntry {
value: string;
timestamp: number;
}
// Constants
const CACHE_SIZE_LIMIT = 1000;
const CACHE_CLEANUP_PERCENTAGE = 0.5; // Remove 50% when limit is reached
function useTimezoneFormatter({
userTimezone,
}: {
userTimezone: Timezone;
}): {
formatTimezoneAdjustedTimestamp: (
input: TimestampInput,
format?: string,
) => string;
} {
// Initialize cache using useMemo to persist between renders
const cache = useMemo(() => new Map<string, CacheEntry>(), []);
// Clear cache when timezone changes
useEffect(() => {
cache.clear();
}, [cache, userTimezone]);
const clearCacheEntries = useCallback(() => {
if (cache.size <= CACHE_SIZE_LIMIT) return;
// Sort entries by timestamp (oldest first)
const sortedEntries = Array.from(cache.entries()).sort(
(a, b) => a[1].timestamp - b[1].timestamp,
);
// Calculate how many entries to remove (50% or overflow, whichever is larger)
const entriesToRemove = Math.max(
Math.floor(cache.size * CACHE_CLEANUP_PERCENTAGE),
cache.size - CACHE_SIZE_LIMIT,
);
// Remove oldest entries
sortedEntries.slice(0, entriesToRemove).forEach(([key]) => cache.delete(key));
}, [cache]);
/**
* Formats a timestamp with the user's timezone and caches the result
* @param {TimestampInput} input - The timestamp to format (string, number, or Date)
* @param {string} [format='YYYY-MM-DD HH:mm:ss'] - The desired output format
* @returns {string} The formatted timestamp string in the user's timezone
* @example
* // Input: UTC timestamp
* // User timezone: 'UTC - 4'
* // Returns: "2024-03-14 15:30:00"
* formatTimezoneAdjustedTimestamp('2024-03-14T19:30:00Z')
*/
const formatTimezoneAdjustedTimestamp = useCallback(
(input: TimestampInput, format = 'YYYY-MM-DD HH:mm:ss'): string => {
const timestamp = dayjs(input).valueOf();
const cacheKey = `${timestamp}_${userTimezone?.value}`;
// Check cache first
const cachedValue = cache.get(cacheKey);
if (cachedValue) {
return cachedValue.value;
}
// Format timestamp
const formattedValue = dayjs(input).tz(userTimezone?.value).format(format);
// Update cache
cache.set(cacheKey, {
value: formattedValue,
timestamp: Date.now(),
});
// Clear expired entries and enforce size limit
if (cache.size > CACHE_SIZE_LIMIT) {
clearCacheEntries();
}
return formattedValue;
},
[cache, clearCacheEntries, userTimezone],
);
return { formatTimezoneAdjustedTimestamp };
}
export default useTimezoneFormatter;
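A usage sketch for the hook; the hard-coded `Timezone` literal is illustrative, since the app supplies the value from the Timezone provider:

```typescript
import { Timezone } from 'components/CustomTimePicker/timezoneUtils';
import useTimezoneFormatter from 'hooks/useTimezoneFormatter/useTimezoneFormatter';

function LastSeen({ iso }: { iso: string }): JSX.Element {
  const { formatTimezoneAdjustedTimestamp } = useTimezoneFormatter({
    userTimezone: { value: 'Europe/Berlin' } as Timezone, // illustrative literal
  });
  // Repeat calls for the same timestamp + zone are served from the Map cache;
  // past 1000 entries, the oldest half is evicted.
  return <span>{formatTimezoneAdjustedTimestamp(iso)}</span>;
}
```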

View File

@@ -7,6 +7,7 @@ import { AxiosError } from 'axios';
import { ThemeProvider } from 'hooks/useDarkMode';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import posthog from 'posthog-js';
import TimezoneProvider from 'providers/Timezone';
import { createRoot } from 'react-dom/client';
import { HelmetProvider } from 'react-helmet-async';
import { QueryClient, QueryClientProvider } from 'react-query';
@@ -69,14 +70,16 @@ if (container) {
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<HelmetProvider>
<ThemeProvider>
<QueryClientProvider client={queryClient}>
<Provider store={store}>
<AppRoutes />
</Provider>
{process.env.NODE_ENV === 'development' && (
<ReactQueryDevtools initialIsOpen={false} />
)}
</QueryClientProvider>
<TimezoneProvider>
<QueryClientProvider client={queryClient}>
<Provider store={store}>
<AppRoutes />
</Provider>
{process.env.NODE_ENV === 'development' && (
<ReactQueryDevtools initialIsOpen={false} />
)}
</QueryClientProvider>
</TimezoneProvider>
</ThemeProvider>
</HelmetProvider>
</Sentry.ErrorBoundary>,

View File

@@ -55,6 +55,8 @@ export interface GetUPlotChartOptions {
>;
customTooltipElement?: HTMLDivElement;
verticalLineTimestamp?: number;
tzDate?: (timestamp: number) => Date;
timezone?: string;
}
/** the function converts series A , series B , series C to
@@ -158,6 +160,8 @@ export const getUPlotChartOptions = ({
setHiddenGraph,
customTooltipElement,
verticalLineTimestamp,
tzDate,
timezone,
}: GetUPlotChartOptions): uPlot.Options => {
const timeScaleProps = getXAxisScale(minTimeScale, maxTimeScale);
@@ -196,6 +200,7 @@ export const getUPlotChartOptions = ({
fill: (): string => '#fff',
},
},
tzDate,
padding: [16, 16, 8, 8],
bands,
scales: {
@@ -222,6 +227,7 @@ export const getUPlotChartOptions = ({
stackBarChart,
isDarkMode,
customTooltipElement,
timezone,
}),
onClickPlugin({
onClick: onClickHandler,

View File

@@ -46,6 +46,7 @@ const generateTooltipContent = (
isHistogramGraphs?: boolean,
isMergedSeries?: boolean,
stackBarChart?: boolean,
timezone?: string,
// eslint-disable-next-line sonarjs/cognitive-complexity
): HTMLElement => {
const container = document.createElement('div');
@@ -69,9 +70,13 @@ const generateTooltipContent = (
series.forEach((item, index) => {
if (index === 0) {
if (isBillingUsageGraphs) {
tooltipTitle = dayjs(data[0][idx] * 1000).format('MMM DD YYYY');
tooltipTitle = dayjs(data[0][idx] * 1000)
.tz(timezone)
.format('MMM DD YYYY');
} else {
tooltipTitle = dayjs(data[0][idx] * 1000).format('MMM DD YYYY HH:mm:ss');
tooltipTitle = dayjs(data[0][idx] * 1000)
.tz(timezone)
.format('MMM DD YYYY h:mm:ss A');
}
} else if (item.show) {
const {
@@ -223,6 +228,7 @@ type ToolTipPluginProps = {
stackBarChart?: boolean;
isDarkMode: boolean;
customTooltipElement?: HTMLDivElement;
timezone?: string;
};
const tooltipPlugin = ({
@@ -234,6 +240,7 @@ const tooltipPlugin = ({
stackBarChart,
isDarkMode,
customTooltipElement,
timezone,
}: // eslint-disable-next-line sonarjs/cognitive-complexity
ToolTipPluginProps): any => {
let over: HTMLElement;
@@ -300,6 +307,7 @@ ToolTipPluginProps): any => {
isHistogramGraphs,
isMergedSeries,
stackBarChart,
timezone,
);
if (customTooltipElement) {
content.appendChild(customTooltipElement);

View File

@@ -31,6 +31,7 @@ import {
Trash2,
X,
} from 'lucide-react';
import { useTimezone } from 'providers/Timezone';
import { ChangeEvent, useEffect, useRef, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useSelector } from 'react-redux';
@@ -207,6 +208,8 @@ function SaveView(): JSX.Element {
}
};
const { formatTimezoneAdjustedTimestamp } = useTimezone();
const columns: TableProps<ViewProps>['columns'] = [
{
title: 'Save View',
@@ -218,31 +221,10 @@ function SaveView(): JSX.Element {
bgColor = extraData.color;
}
const timeOptions: Intl.DateTimeFormatOptions = {
hour: '2-digit',
minute: '2-digit',
second: '2-digit',
hour12: false,
};
const formattedTime = new Date(view.createdAt).toLocaleTimeString(
'en-US',
timeOptions,
const formattedDateAndTime = formatTimezoneAdjustedTimestamp(
view.createdAt,
'HH:mm:ss ⎯ MMM D, YYYY (UTC Z)',
);
const dateOptions: Intl.DateTimeFormatOptions = {
month: 'short',
day: 'numeric',
year: 'numeric',
};
const formattedDate = new Date(view.createdAt).toLocaleDateString(
'en-US',
dateOptions,
);
// Combine time and date
const formattedDateAndTime = `${formattedTime}${formattedDate}`;
const isEditDeleteSupported = allowedRoles.includes(role as string);
return (

View File

@@ -0,0 +1,98 @@
import {
getBrowserTimezone,
getTimezoneObjectByTimezoneString,
Timezone,
} from 'components/CustomTimePicker/timezoneUtils';
import { LOCALSTORAGE } from 'constants/localStorage';
import useTimezoneFormatter, {
TimestampInput,
} from 'hooks/useTimezoneFormatter/useTimezoneFormatter';
import React, {
createContext,
useCallback,
useContext,
useMemo,
useState,
} from 'react';
interface TimezoneContextType {
timezone: Timezone;
browserTimezone: Timezone;
updateTimezone: (timezone: Timezone) => void;
formatTimezoneAdjustedTimestamp: (
input: TimestampInput,
format?: string,
) => string;
}
const TimezoneContext = createContext<TimezoneContextType | undefined>(
undefined,
);
function TimezoneProvider({
children,
}: {
children: React.ReactNode;
}): JSX.Element {
const getStoredTimezoneValue = (): Timezone | null => {
try {
const timezoneValue = localStorage.getItem(LOCALSTORAGE.PREFERRED_TIMEZONE);
if (timezoneValue) {
return getTimezoneObjectByTimezoneString(timezoneValue);
}
} catch (error) {
console.error('Error reading timezone from localStorage:', error);
}
return null;
};
const setStoredTimezoneValue = (value: string): void => {
try {
localStorage.setItem(LOCALSTORAGE.PREFERRED_TIMEZONE, value);
} catch (error) {
console.error('Error saving timezone to localStorage:', error);
}
};
const browserTimezone = useMemo(() => getBrowserTimezone(), []);
const [timezone, setTimezone] = useState<Timezone>(
getStoredTimezoneValue() ?? browserTimezone,
);
const updateTimezone = useCallback((timezone: Timezone): void => {
if (!timezone.value) return;
// TODO(shaheer): replace this with user preferences API
setStoredTimezoneValue(timezone.value);
setTimezone(timezone);
}, []);
const { formatTimezoneAdjustedTimestamp } = useTimezoneFormatter({
userTimezone: timezone,
});
const value = React.useMemo(
() => ({
timezone,
browserTimezone,
updateTimezone,
formatTimezoneAdjustedTimestamp,
}),
[timezone, browserTimezone, updateTimezone, formatTimezoneAdjustedTimestamp],
);
return (
<TimezoneContext.Provider value={value}>{children}</TimezoneContext.Provider>
);
}
export const useTimezone = (): TimezoneContextType => {
const context = useContext(TimezoneContext);
if (context === undefined) {
throw new Error('useTimezone must be used within a TimezoneProvider');
}
return context;
};
export default TimezoneProvider;
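A consumer sketch for the provider above; the `Timezone` object shape is cast for brevity:

```typescript
import { Timezone } from 'components/CustomTimePicker/timezoneUtils';
import { useTimezone } from 'providers/Timezone';

// Switching the preference persists the IANA string to localStorage (see
// setStoredTimezoneValue above) and re-renders every formatter with the new zone.
function TimezoneSwitcher(): JSX.Element {
  const { timezone, updateTimezone } = useTimezone();
  return (
    <button
      type="button"
      onClick={(): void => updateTimezone({ ...timezone, value: 'UTC' } as Timezone)}
    >
      Current zone: {timezone.value} (switch to UTC)
    </button>
  );
}
```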

View File

@@ -8,17 +8,21 @@ export interface LimitProps {
config?: {
day?: {
size?: number;
enabled?: boolean;
};
second?: {
size?: number;
enabled?: boolean;
};
};
metric?: {
day?: {
size?: number;
enabled?: boolean;
};
second?: {
size?: number;
enabled?: boolean;
};
};
}
@@ -27,11 +31,13 @@ export interface AddLimitProps {
keyID: string;
signal: string;
config: {
day: {
size: number;
day?: {
size?: number;
enabled?: boolean;
};
second: {
size: number;
second?: {
size?: number;
enabled?: boolean;
};
};
}
@@ -40,11 +46,13 @@ export interface UpdateLimitProps {
limitID: string;
signal: string;
config: {
day: {
size: number;
day?: {
size?: number;
enabled?: boolean;
};
second: {
size: number;
second?: {
size?: number;
enabled?: boolean;
};
};
}

View File

@@ -4764,6 +4764,11 @@
d3-time-format "4.1.0"
internmap "2.0.3"
"@vvo/tzdb@6.149.0":
version "6.149.0"
resolved "https://registry.yarnpkg.com/@vvo/tzdb/-/tzdb-6.149.0.tgz#e4fcca3c49b90d5910a8679267540cb532809075"
integrity sha512-d68+oW1TE60Ho9FlCDO5Ks4suk6hp5umjNIrtWytVB0B/X0/P1T9yWdnH7EhNb2fx1CQE+MM1qmLUGzT+QAqdw==
"@webassemblyjs/ast@1.12.1":
version "1.12.1"
resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.12.1.tgz#bb16a0e8b1914f979f45864c23819cc3e3f0d4bb"

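The `@vvo/tzdb` entry above is the new dependency backing the timezone picker. A sketch of deriving picker entries from its `getTimeZones()` export; the field names follow the library's documented shape, while the mapped output is an illustrative picker structure, not the app's exact one:

```typescript
import { getTimeZones } from '@vvo/tzdb';

// getTimeZones() returns entries such as { name: 'Asia/Kabul',
// currentTimeFormat: '+04:30 Kabul', alternativeName: 'Afghanistan Time', ... }.
const pickerEntries = getTimeZones().map((tz) => ({
  name: tz.name, // IANA identifier
  offset: tz.currentTimeFormat.split(' ')[0], // e.g. "+04:30"
  searchText: `${tz.name} ${tz.alternativeName} ${tz.mainCities.join(' ')}`,
}));
```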
View File

@@ -766,307 +766,6 @@ func buildFilterArrayQuery(_ context.Context, excludeMap map[string]struct{}, pa
return args
}
func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *model.SpanFilterParams) (*model.SpanFiltersResponse, *model.ApiError) {
var query string
excludeMap := make(map[string]struct{})
for _, e := range queryParams.Exclude {
if e == constants.OperationRequest {
excludeMap[constants.OperationDB] = struct{}{}
continue
}
excludeMap[e] = struct{}{}
}
args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))}
if len(queryParams.TraceID) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.TraceID, constants.TraceID, &query, args)
}
if len(queryParams.ServiceName) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.ServiceName, constants.ServiceName, &query, args)
}
if len(queryParams.HttpRoute) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpRoute, constants.HttpRoute, &query, args)
}
if len(queryParams.HttpHost) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpHost, constants.HttpHost, &query, args)
}
if len(queryParams.HttpMethod) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpMethod, constants.HttpMethod, &query, args)
}
if len(queryParams.HttpUrl) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpUrl, constants.HttpUrl, &query, args)
}
if len(queryParams.Operation) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.Operation, constants.OperationDB, &query, args)
}
if len(queryParams.RPCMethod) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.RPCMethod, constants.RPCMethod, &query, args)
}
if len(queryParams.ResponseStatusCode) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.ResponseStatusCode, constants.ResponseStatusCode, &query, args)
}
if len(queryParams.MinDuration) != 0 {
query = query + " AND durationNano >= @durationNanoMin"
args = append(args, clickhouse.Named("durationNanoMin", queryParams.MinDuration))
}
if len(queryParams.MaxDuration) != 0 {
query = query + " AND durationNano <= @durationNanoMax"
args = append(args, clickhouse.Named("durationNanoMax", queryParams.MaxDuration))
}
if len(queryParams.SpanKind) != 0 {
query = query + " AND kind = @kind"
args = append(args, clickhouse.Named("kind", queryParams.SpanKind))
}
query = getStatusFilters(query, queryParams.Status, excludeMap)
traceFilterReponse := model.SpanFiltersResponse{
Status: map[string]uint64{},
Duration: map[string]uint64{},
ServiceName: map[string]uint64{},
Operation: map[string]uint64{},
ResponseStatusCode: map[string]uint64{},
RPCMethod: map[string]uint64{},
HttpMethod: map[string]uint64{},
HttpUrl: map[string]uint64{},
HttpRoute: map[string]uint64{},
HttpHost: map[string]uint64{},
}
for _, e := range queryParams.GetFilters {
switch e {
case constants.TraceID:
continue
case constants.ServiceName:
finalQuery := fmt.Sprintf("SELECT serviceName, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
finalQuery += query
finalQuery += " GROUP BY serviceName"
var dBResponse []model.DBResponseServiceName
err := r.db.Select(ctx, &dBResponse, finalQuery, args...)
zap.L().Info(finalQuery)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
}
for _, service := range dBResponse {
if service.ServiceName != "" {
traceFilterReponse.ServiceName[service.ServiceName] = service.Count
}
}
case constants.HttpRoute:
finalQuery := fmt.Sprintf("SELECT httpRoute, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
finalQuery += query
finalQuery += " GROUP BY httpRoute"
var dBResponse []model.DBResponseHttpRoute
err := r.db.Select(ctx, &dBResponse, finalQuery, args...)
zap.L().Info(finalQuery)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
}
for _, service := range dBResponse {
if service.HttpRoute != "" {
traceFilterReponse.HttpRoute[service.HttpRoute] = service.Count
}
}
case constants.HttpUrl:
finalQuery := fmt.Sprintf("SELECT httpUrl, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
finalQuery += query
finalQuery += " GROUP BY httpUrl"
var dBResponse []model.DBResponseHttpUrl
err := r.db.Select(ctx, &dBResponse, finalQuery, args...)
zap.L().Info(finalQuery)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
}
for _, service := range dBResponse {
if service.HttpUrl != "" {
traceFilterReponse.HttpUrl[service.HttpUrl] = service.Count
}
}
case constants.HttpMethod:
finalQuery := fmt.Sprintf("SELECT httpMethod, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
finalQuery += query
finalQuery += " GROUP BY httpMethod"
var dBResponse []model.DBResponseHttpMethod
err := r.db.Select(ctx, &dBResponse, finalQuery, args...)
zap.L().Info(finalQuery)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
}
for _, service := range dBResponse {
if service.HttpMethod != "" {
traceFilterReponse.HttpMethod[service.HttpMethod] = service.Count
}
}
case constants.HttpHost:
finalQuery := fmt.Sprintf("SELECT httpHost, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
finalQuery += query
finalQuery += " GROUP BY httpHost"
var dBResponse []model.DBResponseHttpHost
err := r.db.Select(ctx, &dBResponse, finalQuery, args...)
zap.L().Info(finalQuery)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
}
for _, service := range dBResponse {
if service.HttpHost != "" {
traceFilterReponse.HttpHost[service.HttpHost] = service.Count
}
}
case constants.OperationRequest:
finalQuery := fmt.Sprintf("SELECT name, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
finalQuery += query
finalQuery += " GROUP BY name"
var dBResponse []model.DBResponseOperation
err := r.db.Select(ctx, &dBResponse, finalQuery, args...)
zap.L().Info(finalQuery)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
}
for _, service := range dBResponse {
if service.Operation != "" {
traceFilterReponse.Operation[service.Operation] = service.Count
}
}
case constants.Status:
finalQuery := fmt.Sprintf("SELECT COUNT(*) as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU AND hasError = true", r.TraceDB, r.indexTable)
finalQuery += query
var dBResponse []model.DBResponseTotal
err := r.db.Select(ctx, &dBResponse, finalQuery, args...)
zap.L().Info(finalQuery)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
}
finalQuery2 := fmt.Sprintf("SELECT COUNT(*) as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU AND hasError = false", r.TraceDB, r.indexTable)
finalQuery2 += query
var dBResponse2 []model.DBResponseTotal
err = r.db.Select(ctx, &dBResponse2, finalQuery2, args...)
zap.L().Info(finalQuery2)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
}
if len(dBResponse) > 0 && len(dBResponse2) > 0 {
traceFilterReponse.Status = map[string]uint64{"ok": dBResponse2[0].NumTotal, "error": dBResponse[0].NumTotal}
} else if len(dBResponse) > 0 {
traceFilterReponse.Status = map[string]uint64{"ok": 0, "error": dBResponse[0].NumTotal}
} else if len(dBResponse2) > 0 {
traceFilterReponse.Status = map[string]uint64{"ok": dBResponse2[0].NumTotal, "error": 0}
} else {
traceFilterReponse.Status = map[string]uint64{"ok": 0, "error": 0}
}
case constants.Duration:
err := r.featureFlags.CheckFeature(constants.DurationSort)
durationSortEnabled := err == nil
finalQuery := ""
if !durationSortEnabled {
// if duration sort is not enabled, we need to get the min and max duration from the index table
finalQuery = fmt.Sprintf("SELECT min(durationNano) as min, max(durationNano) as max FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
finalQuery += query
var dBResponse []model.DBResponseMinMax
err = r.db.Select(ctx, &dBResponse, finalQuery, args...)
zap.L().Info(finalQuery)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
}
if len(dBResponse) > 0 {
traceFilterReponse.Duration = map[string]uint64{"minDuration": dBResponse[0].Min, "maxDuration": dBResponse[0].Max}
}
} else {
// when duration sort is enabled, we need to get the min and max duration from the duration table
finalQuery = fmt.Sprintf("SELECT durationNano as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.durationTable)
finalQuery += query
finalQuery += " ORDER BY durationNano LIMIT 1"
var dBResponse []model.DBResponseTotal
err = r.db.Select(ctx, &dBResponse, finalQuery, args...)
zap.L().Info(finalQuery)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
}
finalQuery = fmt.Sprintf("SELECT durationNano as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.durationTable)
finalQuery += query
finalQuery += " ORDER BY durationNano DESC LIMIT 1"
var dBResponse2 []model.DBResponseTotal
err = r.db.Select(ctx, &dBResponse2, finalQuery, args...)
zap.L().Info(finalQuery)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
}
if len(dBResponse) > 0 {
traceFilterReponse.Duration["minDuration"] = dBResponse[0].NumTotal
}
if len(dBResponse2) > 0 {
traceFilterReponse.Duration["maxDuration"] = dBResponse2[0].NumTotal
}
}
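// Note: the duration-table path above issues two ORDER BY durationNano ... LIMIT 1
// queries instead of min()/max(); presumably the duration table is sorted by
// durationNano, making those point lookups cheaper than the aggregate scan used
// on the index table.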
case constants.RPCMethod:
finalQuery := fmt.Sprintf("SELECT rpcMethod, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
finalQuery += query
finalQuery += " GROUP BY rpcMethod"
var dBResponse []model.DBResponseRPCMethod
err := r.db.Select(ctx, &dBResponse, finalQuery, args...)
zap.L().Info(finalQuery)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
}
for _, service := range dBResponse {
if service.RPCMethod != "" {
traceFilterReponse.RPCMethod[service.RPCMethod] = service.Count
}
}
case constants.ResponseStatusCode:
finalQuery := fmt.Sprintf("SELECT responseStatusCode, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
finalQuery += query
finalQuery += " GROUP BY responseStatusCode"
var dBResponse []model.DBResponseStatusCodeMethod
err := r.db.Select(ctx, &dBResponse, finalQuery, args...)
zap.L().Info(finalQuery)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
}
for _, service := range dBResponse {
if service.ResponseStatusCode != "" {
traceFilterReponse.ResponseStatusCode[service.ResponseStatusCode] = service.Count
}
}
default:
return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("filter type: %s not supported", e)}
}
}
return &traceFilterReponse, nil
}
func getStatusFilters(query string, statusParams []string, excludeMap map[string]struct{}) string {
// only two status values exist, and selecting both is equivalent to selecting none
@@ -1088,140 +787,6 @@ func getStatusFilters(query string, statusParams []string, excludeMap map[string
return query
}
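// Illustrative reading of the rule above (the elided body is not reproduced here):
// a single selected status reduces to one hasError predicate, e.g.
//   status = ["error"] -> query += " AND hasError = true"
//   status = ["ok"]    -> query += " AND hasError = false"
// while selecting both (or neither) leaves the query unfiltered; excludeMap
// presumably flips the predicate when status is excluded.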
func (r *ClickHouseReader) GetFilteredSpans(ctx context.Context, queryParams *model.GetFilteredSpansParams) (*model.GetFilterSpansResponse, *model.ApiError) {
queryTable := fmt.Sprintf("%s.%s", r.TraceDB, r.indexTable)
excludeMap := make(map[string]struct{})
for _, e := range queryParams.Exclude {
if e == constants.OperationRequest {
excludeMap[constants.OperationDB] = struct{}{}
continue
}
excludeMap[e] = struct{}{}
}
var query string
args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))}
if len(queryParams.TraceID) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.TraceID, constants.TraceID, &query, args)
}
if len(queryParams.ServiceName) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.ServiceName, constants.ServiceName, &query, args)
}
if len(queryParams.HttpRoute) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpRoute, constants.HttpRoute, &query, args)
}
if len(queryParams.HttpHost) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpHost, constants.HttpHost, &query, args)
}
if len(queryParams.HttpMethod) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpMethod, constants.HttpMethod, &query, args)
}
if len(queryParams.HttpUrl) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpUrl, constants.HttpUrl, &query, args)
}
if len(queryParams.Operation) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.Operation, constants.OperationDB, &query, args)
}
if len(queryParams.RPCMethod) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.RPCMethod, constants.RPCMethod, &query, args)
}
if len(queryParams.ResponseStatusCode) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.ResponseStatusCode, constants.ResponseStatusCode, &query, args)
}
if len(queryParams.MinDuration) != 0 {
query = query + " AND durationNano >= @durationNanoMin"
args = append(args, clickhouse.Named("durationNanoMin", queryParams.MinDuration))
}
if len(queryParams.MaxDuration) != 0 {
query = query + " AND durationNano <= @durationNanoMax"
args = append(args, clickhouse.Named("durationNanoMax", queryParams.MaxDuration))
}
query = getStatusFilters(query, queryParams.Status, excludeMap)
if len(queryParams.SpanKind) != 0 {
query = query + " AND kind = @kind"
args = append(args, clickhouse.Named("kind", queryParams.SpanKind))
}
// create TagQuery from TagQueryParams
tags := createTagQueryFromTagQueryParams(queryParams.Tags)
subQuery, argsSubQuery, errStatus := buildQueryWithTagParams(ctx, tags)
query += subQuery
args = append(args, argsSubQuery...)
if errStatus != nil {
return nil, errStatus
}
if len(queryParams.OrderParam) != 0 {
if queryParams.OrderParam == constants.Duration {
queryTable = fmt.Sprintf("%s.%s", r.TraceDB, r.durationTable)
if queryParams.Order == constants.Descending {
query = query + " ORDER BY durationNano DESC"
}
if queryParams.Order == constants.Ascending {
query = query + " ORDER BY durationNano ASC"
}
} else if queryParams.OrderParam == constants.Timestamp {
projectionOptQuery := "SET allow_experimental_projection_optimization = 1"
err := r.db.Exec(ctx, projectionOptQuery)
zap.L().Info(projectionOptQuery)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query")}
}
if queryParams.Order == constants.Descending {
query = query + " ORDER BY timestamp DESC"
}
if queryParams.Order == constants.Ascending {
query = query + " ORDER BY timestamp ASC"
}
}
}
if queryParams.Limit > 0 {
query = query + " LIMIT @limit"
args = append(args, clickhouse.Named("limit", queryParams.Limit))
}
if queryParams.Offset > 0 {
query = query + " OFFSET @offset"
args = append(args, clickhouse.Named("offset", queryParams.Offset))
}
var getFilterSpansResponseItems []model.GetFilterSpansResponseItem
baseQuery := fmt.Sprintf("SELECT timestamp, spanID, traceID, serviceName, name, durationNano, httpMethod, rpcMethod, responseStatusCode FROM %s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryTable)
baseQuery += query
err := r.db.Select(ctx, &getFilterSpansResponseItems, baseQuery, args...)
zap.L().Info(baseQuery)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query")}
}
// Fill status and method, falling back to httpMethod when rpcMethod is empty
for i, e := range getFilterSpansResponseItems {
if e.RPCMethod != "" {
getFilterSpansResponseItems[i].Method = e.RPCMethod
} else {
getFilterSpansResponseItems[i].Method = e.HttpMethod
}
}
getFilterSpansResponse := model.GetFilterSpansResponse{
Spans: getFilterSpansResponseItems,
TotalSpans: 1000,
}
return &getFilterSpansResponse, nil
}
func createTagQueryFromTagQueryParams(queryParams []model.TagQueryParam) []model.TagQuery {
tags := []model.TagQuery{}
for _, tag := range queryParams {
@@ -1379,87 +944,6 @@ func addExistsOperator(item model.TagQuery, tagMapType string, not bool) (string
return fmt.Sprintf(" AND %s (%s)", notStr, strings.Join(tagOperatorPair, " OR ")), args
}
func (r *ClickHouseReader) GetTagFilters(ctx context.Context, queryParams *model.TagFilterParams) (*model.TagFilters, *model.ApiError) {
excludeMap := make(map[string]struct{})
for _, e := range queryParams.Exclude {
if e == constants.OperationRequest {
excludeMap[constants.OperationDB] = struct{}{}
continue
}
excludeMap[e] = struct{}{}
}
var query string
args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))}
if len(queryParams.TraceID) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.TraceID, constants.TraceID, &query, args)
}
if len(queryParams.ServiceName) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.ServiceName, constants.ServiceName, &query, args)
}
if len(queryParams.HttpRoute) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpRoute, constants.HttpRoute, &query, args)
}
if len(queryParams.HttpHost) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpHost, constants.HttpHost, &query, args)
}
if len(queryParams.HttpMethod) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpMethod, constants.HttpMethod, &query, args)
}
if len(queryParams.HttpUrl) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpUrl, constants.HttpUrl, &query, args)
}
if len(queryParams.Operation) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.Operation, constants.OperationDB, &query, args)
}
if len(queryParams.RPCMethod) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.RPCMethod, constants.RPCMethod, &query, args)
}
if len(queryParams.ResponseStatusCode) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.ResponseStatusCode, constants.ResponseStatusCode, &query, args)
}
if len(queryParams.MinDuration) != 0 {
query = query + " AND durationNano >= @durationNanoMin"
args = append(args, clickhouse.Named("durationNanoMin", queryParams.MinDuration))
}
if len(queryParams.MaxDuration) != 0 {
query = query + " AND durationNano <= @durationNanoMax"
args = append(args, clickhouse.Named("durationNanoMax", queryParams.MaxDuration))
}
if len(queryParams.SpanKind) != 0 {
query = query + " AND kind = @kind"
args = append(args, clickhouse.Named("kind", queryParams.SpanKind))
}
query = getStatusFilters(query, queryParams.Status, excludeMap)
tagFilters := []model.TagFilters{}
// Alternative finalQuery := fmt.Sprintf(`SELECT DISTINCT arrayJoin(tagMap.keys) as tagKeys FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU`, r.TraceDB, r.indexTable)
finalQuery := fmt.Sprintf(`SELECT groupUniqArrayArray(mapKeys(stringTagMap)) as stringTagKeys, groupUniqArrayArray(mapKeys(numberTagMap)) as numberTagKeys, groupUniqArrayArray(mapKeys(boolTagMap)) as boolTagKeys FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU`, r.TraceDB, r.indexTable)
finalQuery += query
err := r.db.Select(ctx, &tagFilters, finalQuery, args...)
zap.L().Info(finalQuery)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query")}
}
tagFiltersResult := model.TagFilters{
StringTagKeys: make([]string, 0),
NumberTagKeys: make([]string, 0),
BoolTagKeys: make([]string, 0),
}
if len(tagFilters) != 0 {
tagFiltersResult.StringTagKeys = excludeTags(ctx, tagFilters[0].StringTagKeys)
tagFiltersResult.NumberTagKeys = excludeTags(ctx, tagFilters[0].NumberTagKeys)
tagFiltersResult.BoolTagKeys = excludeTags(ctx, tagFilters[0].BoolTagKeys)
}
return &tagFiltersResult, nil
}
func excludeTags(_ context.Context, tags []string) []string {
excludedTagsMap := map[string]bool{
"http.code": true,
@@ -1483,102 +967,6 @@ func excludeTags(_ context.Context, tags []string) []string {
return newTags
}
func (r *ClickHouseReader) GetTagValues(ctx context.Context, queryParams *model.TagFilterParams) (*model.TagValues, *model.ApiError) {
if queryParams.TagKey.Type == model.TagTypeNumber {
return &model.TagValues{
NumberTagValues: make([]float64, 0),
StringTagValues: make([]string, 0),
BoolTagValues: make([]bool, 0),
}, nil
} else if queryParams.TagKey.Type == model.TagTypeBool {
return &model.TagValues{
NumberTagValues: make([]float64, 0),
StringTagValues: make([]string, 0),
BoolTagValues: []bool{true, false},
}, nil
}
excludeMap := make(map[string]struct{})
for _, e := range queryParams.Exclude {
if e == constants.OperationRequest {
excludeMap[constants.OperationDB] = struct{}{}
continue
}
excludeMap[e] = struct{}{}
}
var query string
args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))}
if len(queryParams.TraceID) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.TraceID, constants.TraceID, &query, args)
}
if len(queryParams.ServiceName) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.ServiceName, constants.ServiceName, &query, args)
}
if len(queryParams.HttpRoute) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpRoute, constants.HttpRoute, &query, args)
}
if len(queryParams.HttpHost) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpHost, constants.HttpHost, &query, args)
}
if len(queryParams.HttpMethod) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpMethod, constants.HttpMethod, &query, args)
}
if len(queryParams.HttpUrl) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpUrl, constants.HttpUrl, &query, args)
}
if len(queryParams.Operation) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.Operation, constants.OperationDB, &query, args)
}
if len(queryParams.MinDuration) != 0 {
query = query + " AND durationNano >= @durationNanoMin"
args = append(args, clickhouse.Named("durationNanoMin", queryParams.MinDuration))
}
if len(queryParams.MaxDuration) != 0 {
query = query + " AND durationNano <= @durationNanoMax"
args = append(args, clickhouse.Named("durationNanoMax", queryParams.MaxDuration))
}
if len(queryParams.SpanKind) != 0 {
query = query + " AND kind = @kind"
args = append(args, clickhouse.Named("kind", queryParams.SpanKind))
}
query = getStatusFilters(query, queryParams.Status, excludeMap)
tagValues := []model.TagValues{}
finalQuery := fmt.Sprintf(`SELECT groupArray(DISTINCT stringTagMap[@key]) as stringTagValues FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU`, r.TraceDB, r.indexTable)
finalQuery += query
finalQuery += " LIMIT @limit"
args = append(args, clickhouse.Named("key", queryParams.TagKey.Key))
args = append(args, clickhouse.Named("limit", queryParams.Limit))
err := r.db.Select(ctx, &tagValues, finalQuery, args...)
zap.L().Info(finalQuery)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query")}
}
cleanedTagValues := model.TagValues{
StringTagValues: []string{},
NumberTagValues: []float64{},
BoolTagValues: []bool{},
}
if len(tagValues) == 0 {
return &cleanedTagValues, nil
}
for _, e := range tagValues[0].StringTagValues {
if e != "" {
cleanedTagValues.StringTagValues = append(cleanedTagValues.StringTagValues, e)
}
}
return &cleanedTagValues, nil
}
func (r *ClickHouseReader) GetTopOperations(ctx context.Context, queryParams *model.GetTopOperationsParams) (*[]model.TopOperationsItem, *model.ApiError) {
namedArgs := []interface{}{
@@ -1823,185 +1211,6 @@ func (r *ClickHouseReader) GetDependencyGraph(ctx context.Context, queryParams *
return &response, nil
}
func (r *ClickHouseReader) GetFilteredSpansAggregates(ctx context.Context, queryParams *model.GetFilteredSpanAggregatesParams) (*model.GetFilteredSpansAggregatesResponse, *model.ApiError) {
excludeMap := make(map[string]struct{})
for _, e := range queryParams.Exclude {
if e == constants.OperationRequest {
excludeMap[constants.OperationDB] = struct{}{}
continue
}
excludeMap[e] = struct{}{}
}
SpanAggregatesDBResponseItems := []model.SpanAggregatesDBResponseItem{}
aggregation_query := ""
if queryParams.Dimension == "duration" {
switch queryParams.AggregationOption {
case "p50":
aggregation_query = " quantile(0.50)(durationNano) as float64Value "
case "p95":
aggregation_query = " quantile(0.95)(durationNano) as float64Value "
case "p90":
aggregation_query = " quantile(0.90)(durationNano) as float64Value "
case "p99":
aggregation_query = " quantile(0.99)(durationNano) as float64Value "
case "max":
aggregation_query = " max(durationNano) as value "
case "min":
aggregation_query = " min(durationNano) as value "
case "avg":
aggregation_query = " avg(durationNano) as float64Value "
case "sum":
aggregation_query = " sum(durationNano) as value "
default:
return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("aggregate type: %s not supported", queryParams.AggregationOption)}
}
} else if queryParams.Dimension == "calls" {
aggregation_query = " count(*) as value "
}
args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))}
var query string
var customStr []string
_, columnExists := constants.GroupByColMap[queryParams.GroupBy]
// Using %s for groupBy params as it can be a custom column and custom columns are not supported by clickhouse-go yet:
// issue link: https://github.com/ClickHouse/clickhouse-go/issues/870
if queryParams.GroupBy != "" && columnExists {
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, %s as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, queryParams.GroupBy, aggregation_query, r.TraceDB, r.indexTable)
args = append(args, clickhouse.Named("groupByVar", queryParams.GroupBy))
} else if queryParams.GroupBy != "" {
customStr = strings.Split(queryParams.GroupBy, ".(")
if len(customStr) < 2 {
return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("GroupBy: %s not supported", queryParams.GroupBy)}
}
if customStr[1] == string(model.TagTypeString)+")" {
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, stringTagMap['%s'] as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, customStr[0], aggregation_query, r.TraceDB, r.indexTable)
} else if customStr[1] == string(model.TagTypeNumber)+")" {
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, toString(numberTagMap['%s']) as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, customStr[0], aggregation_query, r.TraceDB, r.indexTable)
} else if customStr[1] == string(model.TagTypeBool)+")" {
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, toString(boolTagMap['%s']) as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, customStr[0], aggregation_query, r.TraceDB, r.indexTable)
} else {
// return error for unsupported group by
return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("GroupBy: %s not supported", queryParams.GroupBy)}
}
} else {
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.TraceDB, r.indexTable)
}
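// Illustrative sketch with a hypothetical input, assuming model.TagTypeString is "string":
//   GroupBy = "http.status_code.(string)"
//   strings.Split(GroupBy, ".(") -> ["http.status_code", "string)"]
//   -> SELECT ..., stringTagMap['http.status_code'] as groupBy, ...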
if len(queryParams.TraceID) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.TraceID, constants.TraceID, &query, args)
}
if len(queryParams.ServiceName) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.ServiceName, constants.ServiceName, &query, args)
}
if len(queryParams.HttpRoute) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpRoute, constants.HttpRoute, &query, args)
}
if len(queryParams.HttpHost) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpHost, constants.HttpHost, &query, args)
}
if len(queryParams.HttpMethod) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpMethod, constants.HttpMethod, &query, args)
}
if len(queryParams.HttpUrl) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpUrl, constants.HttpUrl, &query, args)
}
if len(queryParams.Operation) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.Operation, constants.OperationDB, &query, args)
}
if len(queryParams.RPCMethod) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.RPCMethod, constants.RPCMethod, &query, args)
}
if len(queryParams.ResponseStatusCode) > 0 {
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.ResponseStatusCode, constants.ResponseStatusCode, &query, args)
}
if len(queryParams.MinDuration) != 0 {
query = query + " AND durationNano >= @durationNanoMin"
args = append(args, clickhouse.Named("durationNanoMin", queryParams.MinDuration))
}
if len(queryParams.MaxDuration) != 0 {
query = query + " AND durationNano <= @durationNanoMax"
args = append(args, clickhouse.Named("durationNanoMax", queryParams.MaxDuration))
}
query = getStatusFilters(query, queryParams.Status, excludeMap)
if len(queryParams.SpanKind) != 0 {
query = query + " AND kind = @kind"
args = append(args, clickhouse.Named("kind", queryParams.SpanKind))
}
// create TagQuery from TagQueryParams
tags := createTagQueryFromTagQueryParams(queryParams.Tags)
subQuery, argsSubQuery, errStatus := buildQueryWithTagParams(ctx, tags)
query += subQuery
args = append(args, argsSubQuery...)
if errStatus != nil {
return nil, errStatus
}
if queryParams.GroupBy != "" && columnExists {
query = query + fmt.Sprintf(" GROUP BY time, %s as groupBy ORDER BY time", queryParams.GroupBy)
} else if queryParams.GroupBy != "" {
if customStr[1] == string(model.TagTypeString)+")" {
query = query + fmt.Sprintf(" GROUP BY time, stringTagMap['%s'] as groupBy ORDER BY time", customStr[0])
} else if customStr[1] == string(model.TagTypeNumber)+")" {
query = query + fmt.Sprintf(" GROUP BY time, toString(numberTagMap['%s']) as groupBy ORDER BY time", customStr[0])
} else if customStr[1] == string(model.TagTypeBool)+")" {
query = query + fmt.Sprintf(" GROUP BY time, toString(boolTagMap['%s']) as groupBy ORDER BY time", customStr[0])
}
} else {
query = query + " GROUP BY time ORDER BY time"
}
err := r.db.Select(ctx, &SpanAggregatesDBResponseItems, query, args...)
zap.L().Info(query)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query")}
}
GetFilteredSpansAggregatesResponse := model.GetFilteredSpansAggregatesResponse{
Items: map[int64]model.SpanAggregatesResponseItem{},
}
for i := range SpanAggregatesDBResponseItems {
if SpanAggregatesDBResponseItems[i].Value == 0 {
SpanAggregatesDBResponseItems[i].Value = uint64(SpanAggregatesDBResponseItems[i].Float64Value)
}
SpanAggregatesDBResponseItems[i].Timestamp = int64(SpanAggregatesDBResponseItems[i].Time.UnixNano())
SpanAggregatesDBResponseItems[i].FloatValue = float32(SpanAggregatesDBResponseItems[i].Value)
if queryParams.AggregationOption == "rate_per_sec" {
SpanAggregatesDBResponseItems[i].FloatValue = float32(SpanAggregatesDBResponseItems[i].Value) / float32(queryParams.StepSeconds)
}
if responseElement, ok := GetFilteredSpansAggregatesResponse.Items[SpanAggregatesDBResponseItems[i].Timestamp]; !ok {
if queryParams.GroupBy != "" && SpanAggregatesDBResponseItems[i].GroupBy != "" {
GetFilteredSpansAggregatesResponse.Items[SpanAggregatesDBResponseItems[i].Timestamp] = model.SpanAggregatesResponseItem{
Timestamp: SpanAggregatesDBResponseItems[i].Timestamp,
GroupBy: map[string]float32{SpanAggregatesDBResponseItems[i].GroupBy: SpanAggregatesDBResponseItems[i].FloatValue},
}
} else if queryParams.GroupBy == "" {
GetFilteredSpansAggregatesResponse.Items[SpanAggregatesDBResponseItems[i].Timestamp] = model.SpanAggregatesResponseItem{
Timestamp: SpanAggregatesDBResponseItems[i].Timestamp,
Value: SpanAggregatesDBResponseItems[i].FloatValue,
}
}
} else {
if queryParams.GroupBy != "" && SpanAggregatesDBResponseItems[i].GroupBy != "" {
responseElement.GroupBy[SpanAggregatesDBResponseItems[i].GroupBy] = SpanAggregatesDBResponseItems[i].FloatValue
}
GetFilteredSpansAggregatesResponse.Items[SpanAggregatesDBResponseItems[i].Timestamp] = responseElement
}
}
return &GetFilteredSpansAggregatesResponse, nil
}
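// Worked example for the rate_per_sec branch above: with StepSeconds = 60 and a
// bucket Value of 120 calls, FloatValue = 120 / 60 = 2.0 calls per second.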
func getLocalTableName(tableName string) string {
tableNameSplit := strings.Split(tableName, ".")

View File

@@ -526,12 +526,6 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *AuthMiddleware) {
router.HandleFunc("/api/v1/configs", am.OpenAccess(aH.getConfigs)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/health", am.OpenAccess(aH.getHealth)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/getSpanFilters", am.ViewAccess(aH.getSpanFilters)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/getTagFilters", am.ViewAccess(aH.getTagFilters)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/getFilteredSpans", am.ViewAccess(aH.getFilteredSpans)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/getFilteredSpans/aggregates", am.ViewAccess(aH.getFilteredSpanAggregates)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/getTagValues", am.ViewAccess(aH.getTagValues)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/listErrors", am.ViewAccess(aH.listErrors)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/countErrors", am.ViewAccess(aH.countErrors)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/errorFromErrorID", am.ViewAccess(aH.getErrorFromErrorID)).Methods(http.MethodGet)
@@ -1847,86 +1841,6 @@ func (aH *APIHandler) getErrorFromGroupID(w http.ResponseWriter, r *http.Request
aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) getSpanFilters(w http.ResponseWriter, r *http.Request) {
query, err := parseSpanFilterRequestBody(r)
if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
result, apiErr := aH.reader.GetSpanFilters(r.Context(), query)
if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
return
}
aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) getFilteredSpans(w http.ResponseWriter, r *http.Request) {
query, err := parseFilteredSpansRequest(r, aH)
if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
result, apiErr := aH.reader.GetFilteredSpans(r.Context(), query)
if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
return
}
aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) getFilteredSpanAggregates(w http.ResponseWriter, r *http.Request) {
query, err := parseFilteredSpanAggregatesRequest(r)
if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
result, apiErr := aH.reader.GetFilteredSpansAggregates(r.Context(), query)
if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
return
}
aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) getTagFilters(w http.ResponseWriter, r *http.Request) {
query, err := parseTagFilterRequest(r)
if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
result, apiErr := aH.reader.GetTagFilters(r.Context(), query)
if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
return
}
aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) getTagValues(w http.ResponseWriter, r *http.Request) {
query, err := parseTagValueRequest(r)
if aH.HandleError(w, err, http.StatusBadRequest) {
return
}
result, apiErr := aH.reader.GetTagValues(r.Context(), query)
if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
return
}
aH.WriteJSON(w, r, result)
}
func (aH *APIHandler) setTTL(w http.ResponseWriter, r *http.Request) {
ttlParams, err := parseTTLParams(r)
if aH.HandleError(w, err, http.StatusBadRequest) {

View File

@@ -142,7 +142,7 @@ func enrichFieldWithMetadata(field v3.AttributeKey, fields map[string]v3.Attribu
}
// check if the field is present in the fields map
for _, key := range utils.GenerateLogEnrichmentKeys(field) {
for _, key := range utils.GenerateEnrichmentKeys(field) {
if val, ok := fields[key]; ok {
return val
}

View File

@@ -436,8 +436,6 @@ func buildLogsQuery(panelType v3.PanelType, start, end, step int64, mq *v3.Build
} else if panelType == v3.PanelTypeTable {
queryTmplPrefix =
"SELECT"
// step or aggregate interval is whole time period in case of table panel
step = (utils.GetEpochNanoSecs(end) - utils.GetEpochNanoSecs(start)) / NANOSECOND
} else if panelType == v3.PanelTypeGraph || panelType == v3.PanelTypeValue {
// Select the aggregate value for interval
queryTmplPrefix =

View File

@@ -12,6 +12,7 @@ import (
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
"go.signoz.io/signoz/pkg/query-service/common"
"go.signoz.io/signoz/pkg/query-service/constants"
chErrors "go.signoz.io/signoz/pkg/query-service/errors"
"go.signoz.io/signoz/pkg/query-service/querycache"
"go.signoz.io/signoz/pkg/query-service/utils"
@@ -52,7 +53,8 @@ type querier struct {
returnedSeries []*v3.Series
returnedErr error
UseLogsNewSchema bool
UseLogsNewSchema bool
UseTraceNewSchema bool
}
type QuerierOptions struct {
@@ -308,56 +310,121 @@ func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRang
return results, errQueriesByName, err
}
func (q *querier) runLogsListQuery(ctx context.Context, params *v3.QueryRangeParamsV3, tsRanges []utils.LogsListTsRange) ([]*v3.Result, map[string]error, error) {
func (q *querier) runWindowBasedListQuery(ctx context.Context, params *v3.QueryRangeParamsV3, tsRanges []utils.LogsListTsRange) ([]*v3.Result, map[string]error, error) {
res := make([]*v3.Result, 0)
qName := ""
pageSize := uint64(0)
limit := uint64(0)
offset := uint64(0)
// here we consider only one query
for name, v := range params.CompositeQuery.BuilderQueries {
qName = name
pageSize = v.PageSize
// for traces specifically
limit = v.Limit
offset = v.Offset
}
data := []*v3.Row{}
tracesLimit := limit + offset
for _, v := range tsRanges {
params.Start = v.Start
params.End = v.End
params.CompositeQuery.BuilderQueries[qName].PageSize = pageSize - uint64(len(data))
queries, err := q.builder.PrepareQueries(params)
if err != nil {
return nil, nil, err
}
length := uint64(0)
// this will run only once
for name, query := range queries {
rowList, err := q.reader.GetListResultV3(ctx, query)
// appending the filter to get the next set of data
if params.CompositeQuery.BuilderQueries[qName].DataSource == v3.DataSourceLogs {
params.CompositeQuery.BuilderQueries[qName].PageSize = pageSize - uint64(len(data))
queries, err := q.builder.PrepareQueries(params)
if err != nil {
errs := []error{err}
errQuriesByName := map[string]error{
name: err,
}
return nil, errQuriesByName, fmt.Errorf("encountered multiple errors: %s", multierr.Combine(errs...))
return nil, nil, err
}
for name, query := range queries {
rowList, err := q.reader.GetListResultV3(ctx, query)
if err != nil {
errs := []error{err}
errQueriesByName := map[string]error{
name: err,
}
return nil, errQueriesByName, fmt.Errorf("encountered multiple errors: %s", multierr.Combine(errs...))
}
length += uint64(len(rowList))
data = append(data, rowList...)
}
data = append(data, rowList...)
}
// append a filter to the params
if len(data) > 0 {
params.CompositeQuery.BuilderQueries[qName].Filters.Items = append(params.CompositeQuery.BuilderQueries[qName].Filters.Items, v3.FilterItem{
Key: v3.AttributeKey{
Key: "id",
IsColumn: true,
DataType: "string",
},
Operator: v3.FilterOperatorLessThan,
Value: data[len(data)-1].Data["id"],
})
}
if length > 0 {
params.CompositeQuery.BuilderQueries[qName].Filters.Items = append(params.CompositeQuery.BuilderQueries[qName].Filters.Items, v3.FilterItem{
Key: v3.AttributeKey{
Key: "id",
IsColumn: true,
DataType: "string",
},
Operator: v3.FilterOperatorLessThan,
Value: data[len(data)-1].Data["id"],
})
}
if uint64(len(data)) >= pageSize {
break
if uint64(len(data)) >= pageSize {
break
}
} else {
// TRACE
// we update the offset and limit based on the number of traces found in the current time range
// e.g.
// 1) offset = 0, limit = 100, tsRanges = [t1, t10], [t10, t20], [t20, t30]
//
// if [t1, t10] has 100 traces, all 100 are returned immediately.
// if [t1, t10] has 10 traces, we take those 10, set offset to 0 and limit to 90, and search the next time range [t10, t20]
// if [t1, t10] has no traces, we search [t10, t20] with offset=0 and limit=100
//
// 2) offset = 50, limit = 100, tsRanges = [t1, t10], [t10, t20], [t20, t30]
//
// if [t1, t10] yields 150 traces with limit=150 and offset=0, we immediately return 100 traces
// if [t1, t10] yields 50 traces with limit=150 and offset=0, we set limit=100 and offset=0 and search the next time range [t10, t20]
// if [t1, t10] has no traces, we search [t10, t20] with limit=150 and offset=0
// max limit + offset is 10k for pagination
if tracesLimit > constants.TRACE_V4_MAX_PAGINATION_LIMIT {
return nil, nil, fmt.Errorf("maximum traces that can be paginated is 10000")
}
params.CompositeQuery.BuilderQueries[qName].Offset = 0
params.CompositeQuery.BuilderQueries[qName].Limit = tracesLimit
queries, err := q.builder.PrepareQueries(params)
if err != nil {
return nil, nil, err
}
for name, query := range queries {
rowList, err := q.reader.GetListResultV3(ctx, query)
if err != nil {
errs := []error{err}
errQueriesByName := map[string]error{
name: err,
}
return nil, errQueriesByName, fmt.Errorf("encountered multiple errors: %s", multierr.Combine(errs...))
}
length += uint64(len(rowList))
// drop rows until the requested offset has been consumed
for _, row := range rowList {
if offset == 0 {
data = append(data, row)
} else {
offset--
}
}
}
tracesLimit = tracesLimit - length
if uint64(len(data)) >= limit {
break
}
}
}
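// e.g. with limit=4 and offset=3, a first window yielding three rows has them all
// consumed by the offset; subsequent windows are queried with tracesLimit shrunk by
// the rows already seen (7 -> 4 -> 1) until four rows are collected, as exercised
// by the "query with offset" case in Test_querier_runWindowBasedListQuery below.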
res = append(res, &v3.Result{
@@ -368,15 +435,25 @@ func (q *querier) runLogsListQuery(ctx context.Context, params *v3.QueryRangePar
}
func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, map[string]error, error) {
// List query has support for only one query.
if q.UseLogsNewSchema && params.CompositeQuery != nil && len(params.CompositeQuery.BuilderQueries) == 1 {
// List query has support for only one query
// we are skipping for PanelTypeTrace as it has a custom order by regardless of what's in the payload
if params.CompositeQuery != nil &&
len(params.CompositeQuery.BuilderQueries) == 1 &&
params.CompositeQuery.PanelType != v3.PanelTypeTrace {
for _, v := range params.CompositeQuery.BuilderQueries {
if (v.DataSource == v3.DataSourceLogs && !q.UseLogsNewSchema) ||
(v.DataSource == v3.DataSourceTraces && !q.UseTraceNewSchema) {
break
}
// only allow logs queries with timestamp ordering desc
if v.DataSource == v3.DataSourceLogs && len(v.OrderBy) == 1 && v.OrderBy[0].ColumnName == "timestamp" && v.OrderBy[0].Order == "desc" {
startEndArr := utils.GetLogsListTsRanges(params.Start, params.End)
if len(startEndArr) > 0 {
return q.runLogsListQuery(ctx, params, startEndArr)
}
// TODO(nitya): allow for timestamp asc
if (v.DataSource == v3.DataSourceLogs || v.DataSource == v3.DataSourceTraces) &&
len(v.OrderBy) == 1 &&
v.OrderBy[0].ColumnName == "timestamp" &&
v.OrderBy[0].Order == "desc" {
startEndArr := utils.GetListTsRanges(params.Start, params.End)
return q.runWindowBasedListQuery(ctx, params, startEndArr)
}
}
}
@@ -408,13 +485,13 @@ func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRan
close(ch)
var errs []error
errQuriesByName := make(map[string]error)
errQueriesByName := make(map[string]error)
res := make([]*v3.Result, 0)
// read values from the channel
for r := range ch {
if r.Err != nil {
errs = append(errs, r.Err)
errQuriesByName[r.Name] = r.Err
errQueriesByName[r.Name] = r.Err
continue
}
res = append(res, &v3.Result{
@@ -423,7 +500,7 @@ func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRan
})
}
if len(errs) != 0 {
return nil, errQuriesByName, fmt.Errorf("encountered multiple errors: %s", multierr.Combine(errs...))
return nil, errQueriesByName, fmt.Errorf("encountered multiple errors: %s", multierr.Combine(errs...))
}
return res, nil, nil
}

View File

@@ -5,15 +5,21 @@ import (
"encoding/json"
"fmt"
"math"
"regexp"
"strings"
"testing"
"time"
cmock "github.com/srikanthccv/ClickHouse-go-mock"
"github.com/stretchr/testify/require"
"go.signoz.io/signoz/pkg/query-service/app/clickhouseReader"
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
"go.signoz.io/signoz/pkg/query-service/cache/inmemory"
"go.signoz.io/signoz/pkg/query-service/featureManager"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
"go.signoz.io/signoz/pkg/query-service/querycache"
"go.signoz.io/signoz/pkg/query-service/utils"
)
func minTimestamp(series []*v3.Series) int64 {
@@ -1124,3 +1130,304 @@ func TestQueryRangeValueTypePromQL(t *testing.T) {
}
}
}
type regexMatcher struct {
}
func (m *regexMatcher) Match(expectedSQL, actualSQL string) error {
re, err := regexp.Compile(expectedSQL)
if err != nil {
return err
}
if !re.MatchString(actualSQL) {
return fmt.Errorf("expected query to contain %s, got %s", expectedSQL, actualSQL)
}
return nil
}
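// With this matcher passed to cmock.NewClickHouseWithQueryMatcher below, the
// expectedQuery strings in each test case are treated as regular expressions
// rather than exact SQL matches.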
func Test_querier_runWindowBasedListQuery(t *testing.T) {
params := &v3.QueryRangeParamsV3{
Start: 1722171576000000000, // July 28, 2024 6:29:36 PM
End: 1722262800000000000, // July 29, 2024 7:50:00 PM
CompositeQuery: &v3.CompositeQuery{
PanelType: v3.PanelTypeList,
BuilderQueries: map[string]*v3.BuilderQuery{
"A": {
QueryName: "A",
Expression: "A",
DataSource: v3.DataSourceTraces,
PageSize: 10,
Limit: 100,
StepInterval: 60,
AggregateOperator: v3.AggregateOperatorNoOp,
SelectColumns: []v3.AttributeKey{{Key: "serviceName"}},
Filters: &v3.FilterSet{
Operator: "AND",
Items: []v3.FilterItem{},
},
},
},
},
}
tsRanges := []utils.LogsListTsRange{
{
Start: 1722259200000000000, // July 29, 2024 6:50:00 PM
End: 1722262800000000000, // July 29, 2024 7:50:00 PM
},
{
Start: 1722252000000000000, // July 29, 2024 4:50:00 PM
End: 1722259200000000000, // July 29, 2024 6:50:00 PM
},
{
Start: 1722237600000000000, // July 29, 2024 12:50:00 PM
End: 1722252000000000000, // July 29, 2024 4:50:00 PM
},
{
Start: 1722208800000000000, // July 29, 2024 4:50:00 AM
End: 1722237600000000000, // July 29, 2024 12:50:00 PM
},
{
Start: 1722171576000000000, // July 28, 2024 6:29:36 PM
End: 1722208800000000000, // July 29, 2024 4:50:00 AM
},
}
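// Note: the windows above run newest-first and double in size (1h, 2h, 4h, 8h),
// with the last window absorbing the remainder back to the requested start;
// presumably this mirrors what utils.GetListTsRanges produces for this interval.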
type queryParams struct {
start int64
end int64
limit uint64
offset uint64
}
type queryResponse struct {
expectedQuery string
timestamps []uint64
}
// create test cases with mock data, i.e. arrays of timestamps, limit, offset, and expected results
testCases := []struct {
name string
queryResponses []queryResponse
queryParams queryParams
expectedTimestamps []int64
expectedError bool
}{
{
name: "should return correct timestamps when querying within time window",
queryResponses: []queryResponse{
{
expectedQuery: ".*(timestamp >= '1722259200000000000' AND timestamp <= '1722262800000000000').* DESC LIMIT 2",
timestamps: []uint64{1722259300000000000, 1722259400000000000},
},
},
queryParams: queryParams{
start: 1722171576000000000,
end: 1722262800000000000,
limit: 2,
offset: 0,
},
expectedTimestamps: []int64{1722259300000000000, 1722259400000000000},
},
{
name: "all data not in first windows",
queryResponses: []queryResponse{
{
expectedQuery: ".*(timestamp >= '1722259200000000000' AND timestamp <= '1722262800000000000').* DESC LIMIT 3",
timestamps: []uint64{1722259300000000000, 1722259400000000000},
},
{
expectedQuery: ".*(timestamp >= '1722252000000000000' AND timestamp <= '1722259200000000000').* DESC LIMIT 1",
timestamps: []uint64{1722253000000000000},
},
},
queryParams: queryParams{
start: 1722171576000000000,
end: 1722262800000000000,
limit: 3,
offset: 0,
},
expectedTimestamps: []int64{1722259300000000000, 1722259400000000000, 1722253000000000000},
},
{
name: "data in multiple windows",
queryResponses: []queryResponse{
{
expectedQuery: ".*(timestamp >= '1722259200000000000' AND timestamp <= '1722262800000000000').* DESC LIMIT 5",
timestamps: []uint64{1722259300000000000, 1722259400000000000},
},
{
expectedQuery: ".*(timestamp >= '1722252000000000000' AND timestamp <= '1722259200000000000').* DESC LIMIT 3",
timestamps: []uint64{1722253000000000000},
},
{
expectedQuery: ".*(timestamp >= '1722237600000000000' AND timestamp <= '1722252000000000000').* DESC LIMIT 2",
timestamps: []uint64{1722237700000000000},
},
{
expectedQuery: ".*(timestamp >= '1722208800000000000' AND timestamp <= '1722237600000000000').* DESC LIMIT 1",
timestamps: []uint64{},
},
{
expectedQuery: ".*(timestamp >= '1722171576000000000' AND timestamp <= '1722208800000000000').* DESC LIMIT 1",
timestamps: []uint64{},
},
},
queryParams: queryParams{
start: 1722171576000000000,
end: 1722262800000000000,
limit: 5,
offset: 0,
},
expectedTimestamps: []int64{1722259300000000000, 1722259400000000000, 1722253000000000000, 1722237700000000000},
},
{
name: "query with offset",
queryResponses: []queryResponse{
{
expectedQuery: ".*(timestamp >= '1722259200000000000' AND timestamp <= '1722262800000000000').* DESC LIMIT 7",
timestamps: []uint64{1722259210000000000, 1722259220000000000, 1722259230000000000},
},
{
expectedQuery: ".*(timestamp >= '1722252000000000000' AND timestamp <= '1722259200000000000').* DESC LIMIT 4",
timestamps: []uint64{1722253000000000000, 1722254000000000000, 1722255000000000000},
},
{
expectedQuery: ".*(timestamp >= '1722237600000000000' AND timestamp <= '1722252000000000000').* DESC LIMIT 1",
timestamps: []uint64{1722237700000000000},
},
},
queryParams: queryParams{
start: 1722171576000000000,
end: 1722262800000000000,
limit: 4,
offset: 3,
},
expectedTimestamps: []int64{1722253000000000000, 1722254000000000000, 1722255000000000000, 1722237700000000000},
},
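// Walk-through of the case above: tracesLimit starts at limit+offset = 7; the first
// window's three rows are all swallowed by the offset, the next window's three rows
// and the third window's single row fill the page, with LIMIT shrinking 7 -> 4 -> 1.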
{
name: "query with offset and limit- data spread across multiple windows",
queryResponses: []queryResponse{
{
expectedQuery: ".*(timestamp >= '1722259200000000000' AND timestamp <= '1722262800000000000').* DESC LIMIT 11",
timestamps: []uint64{},
},
{
expectedQuery: ".*(timestamp >= '1722252000000000000' AND timestamp <= '1722259200000000000').* DESC LIMIT 11",
timestamps: []uint64{1722253000000000000, 1722254000000000000, 1722255000000000000},
},
{
expectedQuery: ".*(timestamp >= '1722237600000000000' AND timestamp <= '1722252000000000000').* DESC LIMIT 8",
timestamps: []uint64{1722237700000000000, 1722237800000000000, 1722237900000000000, 1722237910000000000, 1722237920000000000},
},
{
expectedQuery: ".*(timestamp >= '1722208800000000000' AND timestamp <= '1722237600000000000').* DESC LIMIT 3",
timestamps: []uint64{1722208810000000000, 1722208820000000000, 1722208830000000000},
},
},
queryParams: queryParams{
start: 1722171576000000000,
end: 1722262800000000000,
limit: 5,
offset: 6,
},
expectedTimestamps: []int64{1722237910000000000, 1722237920000000000, 1722208810000000000, 1722208820000000000, 1722208830000000000},
},
{
name: "don't allow pagination to get more than 10k spans",
queryResponses: []queryResponse{},
queryParams: queryParams{
start: 1722171576000000000,
end: 1722262800000000000,
limit: 10,
offset: 9991,
},
expectedError: true,
},
}
cols := []cmock.ColumnType{
{Name: "timestamp", Type: "UInt64"},
{Name: "name", Type: "String"},
}
testName := "name"
options := clickhouseReader.NewOptions("", 0, 0, 0, "", "archiveNamespace")
// iterate over test data, create reader and run test
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
// Setup mock
mock, err := cmock.NewClickHouseWithQueryMatcher(nil, &regexMatcher{})
require.NoError(t, err, "Failed to create ClickHouse mock")
// Configure mock responses
for _, response := range tc.queryResponses {
values := make([][]any, 0, len(response.timestamps))
for _, ts := range response.timestamps {
ts := ts // copy the loop variable so each row keeps its own timestamp (needed below Go 1.22)
values = append(values, []any{&ts, &testName})
}
mock.ExpectQuery(response.expectedQuery).WillReturnRows(
cmock.NewRows(cols, values),
)
}
// Create reader and querier
reader := clickhouseReader.NewReaderFromClickhouseConnection(
mock,
options,
nil,
"",
featureManager.StartManager(),
"",
true,
)
q := &querier{
reader: reader,
builder: queryBuilder.NewQueryBuilder(
queryBuilder.QueryBuilderOptions{
BuildTraceQuery: tracesV3.PrepareTracesQuery,
},
featureManager.StartManager(),
),
}
// Update query parameters
params.Start = tc.queryParams.start
params.End = tc.queryParams.end
params.CompositeQuery.BuilderQueries["A"].Limit = tc.queryParams.limit
params.CompositeQuery.BuilderQueries["A"].Offset = tc.queryParams.offset
// Execute query
results, errMap, err := q.runWindowBasedListQuery(context.Background(), params, tsRanges)
if tc.expectedError {
require.Error(t, err)
return
}
// Assertions
require.NoError(t, err, "Query execution failed")
require.Nil(t, errMap, "Unexpected error map in results")
require.Len(t, results, 1, "Expected exactly one result set")
result := results[0]
require.Equal(t, "A", result.QueryName, "Incorrect query name in results")
require.Len(t, result.List, len(tc.expectedTimestamps),
"Result count mismatch: got %d results, expected %d",
len(result.List), len(tc.expectedTimestamps))
for i, expected := range tc.expectedTimestamps {
require.Equal(t, expected, result.List[i].Timestamp.UnixNano(),
"Timestamp mismatch at index %d: got %d, expected %d",
i, result.List[i].Timestamp.UnixNano(), expected)
}
// Verify mock expectations
err = mock.ExpectationsWereMet()
require.NoError(t, err, "Mock expectations were not met")
})
}
}

View File

@@ -12,6 +12,7 @@ import (
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
"go.signoz.io/signoz/pkg/query-service/common"
"go.signoz.io/signoz/pkg/query-service/constants"
chErrors "go.signoz.io/signoz/pkg/query-service/errors"
"go.signoz.io/signoz/pkg/query-service/querycache"
"go.signoz.io/signoz/pkg/query-service/utils"
@@ -48,10 +49,11 @@ type querier struct {
testingMode bool
queriesExecuted []string
// tuple of start and end time in milliseconds
timeRanges [][]int
returnedSeries []*v3.Series
returnedErr error
UseLogsNewSchema bool
timeRanges [][]int
returnedSeries []*v3.Series
returnedErr error
UseLogsNewSchema bool
UseTraceNewSchema bool
}
type QuerierOptions struct {
@@ -308,56 +310,121 @@ func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRang
return results, errQueriesByName, err
}
func (q *querier) runLogsListQuery(ctx context.Context, params *v3.QueryRangeParamsV3, tsRanges []utils.LogsListTsRange) ([]*v3.Result, map[string]error, error) {
func (q *querier) runWindowBasedListQuery(ctx context.Context, params *v3.QueryRangeParamsV3, tsRanges []utils.LogsListTsRange) ([]*v3.Result, map[string]error, error) {
res := make([]*v3.Result, 0)
qName := ""
pageSize := uint64(0)
limit := uint64(0)
offset := uint64(0)
// here we consider only one query
for name, v := range params.CompositeQuery.BuilderQueries {
qName = name
pageSize = v.PageSize
// for traces specifically
limit = v.Limit
offset = v.Offset
}
data := []*v3.Row{}
tracesLimit := limit + offset
for _, v := range tsRanges {
params.Start = v.Start
params.End = v.End
params.CompositeQuery.BuilderQueries[qName].PageSize = pageSize - uint64(len(data))
queries, err := q.builder.PrepareQueries(params)
if err != nil {
return nil, nil, err
}
length := uint64(0)
// this will run only once
for name, query := range queries {
rowList, err := q.reader.GetListResultV3(ctx, query)
// appending the filter to get the next set of data
if params.CompositeQuery.BuilderQueries[qName].DataSource == v3.DataSourceLogs {
params.CompositeQuery.BuilderQueries[qName].PageSize = pageSize - uint64(len(data))
queries, err := q.builder.PrepareQueries(params)
if err != nil {
errs := []error{err}
errQuriesByName := map[string]error{
name: err,
}
return nil, errQuriesByName, fmt.Errorf("encountered multiple errors: %s", multierr.Combine(errs...))
return nil, nil, err
}
for name, query := range queries {
rowList, err := q.reader.GetListResultV3(ctx, query)
if err != nil {
errs := []error{err}
errQueriesByName := map[string]error{
name: err,
}
return nil, errQueriesByName, fmt.Errorf("encountered multiple errors: %s", multierr.Combine(errs...))
}
length += uint64(len(rowList))
data = append(data, rowList...)
}
data = append(data, rowList...)
}
// append a filter to the params
if len(data) > 0 {
params.CompositeQuery.BuilderQueries[qName].Filters.Items = append(params.CompositeQuery.BuilderQueries[qName].Filters.Items, v3.FilterItem{
Key: v3.AttributeKey{
Key: "id",
IsColumn: true,
DataType: "string",
},
Operator: v3.FilterOperatorLessThan,
Value: data[len(data)-1].Data["id"],
})
}
if length > 0 {
params.CompositeQuery.BuilderQueries[qName].Filters.Items = append(params.CompositeQuery.BuilderQueries[qName].Filters.Items, v3.FilterItem{
Key: v3.AttributeKey{
Key: "id",
IsColumn: true,
DataType: "string",
},
Operator: v3.FilterOperatorLessThan,
Value: data[len(data)-1].Data["id"],
})
}
if uint64(len(data)) >= pageSize {
break
if uint64(len(data)) >= pageSize {
break
}
} else {
// TRACE
// we update the offset and limit based on the number of traces found in the current time range
// e.g.
// 1) offset = 0, limit = 100, tsRanges = [t1, t10], [t10, t20], [t20, t30]
//
// if [t1, t10] has 100 traces, all 100 are returned immediately.
// if [t1, t10] has 10 traces, we take those 10, set offset to 0 and limit to 90, and search the next time range [t10, t20]
// if [t1, t10] has no traces, we search [t10, t20] with offset=0 and limit=100
//
// 2) offset = 50, limit = 100, tsRanges = [t1, t10], [t10, t20], [t20, t30]
//
// if [t1, t10] yields 150 traces with limit=150 and offset=0, we immediately return 100 traces
// if [t1, t10] yields 50 traces with limit=150 and offset=0, we set limit=100 and offset=0 and search the next time range [t10, t20]
// if [t1, t10] has no traces, we search [t10, t20] with limit=150 and offset=0
// max limit + offset is 10k for pagination
if tracesLimit > constants.TRACE_V4_MAX_PAGINATION_LIMIT {
return nil, nil, fmt.Errorf("maximum traces that can be paginated is 10000")
}
params.CompositeQuery.BuilderQueries[qName].Offset = 0
params.CompositeQuery.BuilderQueries[qName].Limit = tracesLimit
queries, err := q.builder.PrepareQueries(params)
if err != nil {
return nil, nil, err
}
for name, query := range queries {
rowList, err := q.reader.GetListResultV3(ctx, query)
if err != nil {
errs := []error{err}
errQueriesByName := map[string]error{
name: err,
}
return nil, errQueriesByName, fmt.Errorf("encountered multiple errors: %s", multierr.Combine(errs...))
}
length += uint64(len(rowList))
// drop rows until the requested offset has been consumed
for _, row := range rowList {
if offset == 0 {
data = append(data, row)
} else {
offset--
}
}
}
tracesLimit = tracesLimit - length
if uint64(len(data)) >= limit {
break
}
}
}
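// e.g. with limit=4 and offset=3, a first window yielding three rows has them all
// consumed by the offset; subsequent windows are queried with tracesLimit shrunk by
// the rows already seen (7 -> 4 -> 1) until four rows are collected, as exercised
// by the "query with offset" case in Test_querier_runWindowBasedListQuery below.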
res = append(res, &v3.Result{
@@ -369,14 +436,24 @@ func (q *querier) runLogsListQuery(ctx context.Context, params *v3.QueryRangePar
func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, map[string]error, error) {
// List query has support for only one query.
if q.UseLogsNewSchema && params.CompositeQuery != nil && len(params.CompositeQuery.BuilderQueries) == 1 {
// we are skipping for PanelTypeTrace as it has a custom order by regardless of what's in the payload
if params.CompositeQuery != nil &&
len(params.CompositeQuery.BuilderQueries) == 1 &&
params.CompositeQuery.PanelType != v3.PanelTypeTrace {
for _, v := range params.CompositeQuery.BuilderQueries {
if (v.DataSource == v3.DataSourceLogs && !q.UseLogsNewSchema) ||
(v.DataSource == v3.DataSourceTraces && !q.UseTraceNewSchema) {
break
}
// only allow logs queries with timestamp ordering desc
if v.DataSource == v3.DataSourceLogs && len(v.OrderBy) == 1 && v.OrderBy[0].ColumnName == "timestamp" && v.OrderBy[0].Order == "desc" {
startEndArr := utils.GetLogsListTsRanges(params.Start, params.End)
if len(startEndArr) > 0 {
return q.runLogsListQuery(ctx, params, startEndArr)
}
// TODO(nitya): allow for timestamp asc
if (v.DataSource == v3.DataSourceLogs || v.DataSource == v3.DataSourceTraces) &&
len(v.OrderBy) == 1 &&
v.OrderBy[0].ColumnName == "timestamp" &&
v.OrderBy[0].Order == "desc" {
startEndArr := utils.GetListTsRanges(params.Start, params.End)
return q.runWindowBasedListQuery(ctx, params, startEndArr)
}
}
}
@@ -416,13 +493,13 @@ func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRan
close(ch)
var errs []error
errQuriesByName := make(map[string]error)
errQueriesByName := make(map[string]error)
res := make([]*v3.Result, 0)
// read values from the channel
for r := range ch {
if r.Err != nil {
errs = append(errs, r.Err)
errQuriesByName[r.Name] = r.Err
errQueriesByName[r.Name] = r.Err
continue
}
res = append(res, &v3.Result{
@@ -431,7 +508,7 @@ func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRan
})
}
if len(errs) != 0 {
return nil, errQuriesByName, fmt.Errorf("encountered multiple errors: %s", multierr.Combine(errs...))
return nil, errQueriesByName, fmt.Errorf("encountered multiple errors: %s", multierr.Combine(errs...))
}
return res, nil, nil
}

View File

@@ -5,15 +5,21 @@ import (
"encoding/json"
"fmt"
"math"
"regexp"
"strings"
"testing"
"time"
cmock "github.com/srikanthccv/ClickHouse-go-mock"
"github.com/stretchr/testify/require"
"go.signoz.io/signoz/pkg/query-service/app/clickhouseReader"
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
"go.signoz.io/signoz/pkg/query-service/cache/inmemory"
"go.signoz.io/signoz/pkg/query-service/featureManager"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
"go.signoz.io/signoz/pkg/query-service/querycache"
"go.signoz.io/signoz/pkg/query-service/utils"
)
func minTimestamp(series []*v3.Series) int64 {
@@ -798,8 +804,8 @@ func TestV2QueryRangeValueType(t *testing.T) {
}
q := NewQuerier(opts)
expectedTimeRangeInQueryString := []string{
fmt.Sprintf("unix_milli >= %d AND unix_milli < %d", 1675115520000, 1675115580000+120*60*1000), // 31st Jan, 03:23:00 to 31st Jan, 05:23:00
fmt.Sprintf("unix_milli >= %d AND unix_milli < %d", 1675115580000+120*60*1000, 1675115580000+180*60*1000), // 31st Jan, 05:23:00 to 31st Jan, 06:23:00
fmt.Sprintf("unix_milli >= %d AND unix_milli < %d", 1675115520000, 1675115580000+120*60*1000), // 31st Jan, 03:23:00 to 31st Jan, 05:23:00
fmt.Sprintf("unix_milli >= %d AND unix_milli < %d", 1675115580000+120*60*1000, 1675115580000+180*60*1000), // 31st Jan, 05:23:00 to 31st Jan, 06:23:00
fmt.Sprintf("timestamp >= '%d' AND timestamp <= '%d'", (1675119196722)*int64(1000000), (1675126396722)*int64(1000000)), // 31st Jan, 05:23:00 to 31st Jan, 06:23:00
}
@@ -1178,3 +1184,304 @@ func TestV2QueryRangeValueTypePromQL(t *testing.T) {
}
}
}
type regexMatcher struct {
}
func (m *regexMatcher) Match(expectedSQL, actualSQL string) error {
re, err := regexp.Compile(expectedSQL)
if err != nil {
return err
}
if !re.MatchString(actualSQL) {
return fmt.Errorf("expected query to contain %s, got %s", expectedSQL, actualSQL)
}
return nil
}
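
The matcher lets each expected query in the tests below be written as a regular expression that is checked against the SQL the querier actually issues. A standalone example of the contract, using only the type above (the SQL text is illustrative):

m := &regexMatcher{}
// err is nil here: the pattern is found inside the generated SQL
err := m.Match(".* DESC LIMIT 2", "SELECT ts FROM spans ORDER BY ts DESC LIMIT 2")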
func Test_querier_runWindowBasedListQuery(t *testing.T) {
params := &v3.QueryRangeParamsV3{
Start: 1722171576000000000, // July 28, 2024 6:29:36 PM
End: 1722262800000000000, // July 29, 2024 7:50:00 PM
CompositeQuery: &v3.CompositeQuery{
PanelType: v3.PanelTypeList,
BuilderQueries: map[string]*v3.BuilderQuery{
"A": {
QueryName: "A",
Expression: "A",
DataSource: v3.DataSourceTraces,
PageSize: 10,
Limit: 100,
StepInterval: 60,
AggregateOperator: v3.AggregateOperatorNoOp,
SelectColumns: []v3.AttributeKey{{Key: "serviceName"}},
Filters: &v3.FilterSet{
Operator: "AND",
Items: []v3.FilterItem{},
},
},
},
},
}
tsRanges := []utils.LogsListTsRange{
{
Start: 1722259200000000000, // July 29, 2024 6:50:00 PM
End: 1722262800000000000, // July 29, 2024 7:50:00 PM
},
{
Start: 1722252000000000000, // July 29, 2024 4:50:00 PM
End: 1722259200000000000, // July 29, 2024 6:50:00 PM
},
{
Start: 1722237600000000000, // July 29, 2024 12:50:00 PM
End: 1722252000000000000, // July 29, 2024 4:50:00 PM
},
{
Start: 1722208800000000000, // July 29, 2024 4:50:00 AM
End: 1722237600000000000, // July 29, 2024 12:50:00 PM
},
{
Start: 1722171576000000000, // July 28, 2024 6:29:36 PM
End: 1722208800000000000, // July 29, 2024 4:50:00 AM
},
}
type queryParams struct {
start int64
end int64
limit uint64
offset uint64
}
type queryResponse struct {
expectedQuery string
timestamps []uint64
}
// create test struct with mock data, i.e. array of timestamps, limit, offset, and expected results
testCases := []struct {
name string
queryResponses []queryResponse
queryParams queryParams
expectedTimestamps []int64
expectedError bool
}{
{
name: "should return correct timestamps when querying within time window",
queryResponses: []queryResponse{
{
expectedQuery: ".*(timestamp >= '1722259200000000000' AND timestamp <= '1722262800000000000').* DESC LIMIT 2",
timestamps: []uint64{1722259300000000000, 1722259400000000000},
},
},
queryParams: queryParams{
start: 1722171576000000000,
end: 1722262800000000000,
limit: 2,
offset: 0,
},
expectedTimestamps: []int64{1722259300000000000, 1722259400000000000},
},
{
name: "all data not in first window",
queryResponses: []queryResponse{
{
expectedQuery: ".*(timestamp >= '1722259200000000000' AND timestamp <= '1722262800000000000').* DESC LIMIT 3",
timestamps: []uint64{1722259300000000000, 1722259400000000000},
},
{
expectedQuery: ".*(timestamp >= '1722252000000000000' AND timestamp <= '1722259200000000000').* DESC LIMIT 1",
timestamps: []uint64{1722253000000000000},
},
},
queryParams: queryParams{
start: 1722171576000000000,
end: 1722262800000000000,
limit: 3,
offset: 0,
},
expectedTimestamps: []int64{1722259300000000000, 1722259400000000000, 1722253000000000000},
},
{
name: "data in multiple windows",
queryResponses: []queryResponse{
{
expectedQuery: ".*(timestamp >= '1722259200000000000' AND timestamp <= '1722262800000000000').* DESC LIMIT 5",
timestamps: []uint64{1722259300000000000, 1722259400000000000},
},
{
expectedQuery: ".*(timestamp >= '1722252000000000000' AND timestamp <= '1722259200000000000').* DESC LIMIT 3",
timestamps: []uint64{1722253000000000000},
},
{
expectedQuery: ".*(timestamp >= '1722237600000000000' AND timestamp <= '1722252000000000000').* DESC LIMIT 2",
timestamps: []uint64{1722237700000000000},
},
{
expectedQuery: ".*(timestamp >= '1722208800000000000' AND timestamp <= '1722237600000000000').* DESC LIMIT 1",
timestamps: []uint64{},
},
{
expectedQuery: ".*(timestamp >= '1722171576000000000' AND timestamp <= '1722208800000000000').* DESC LIMIT 1",
timestamps: []uint64{},
},
},
queryParams: queryParams{
start: 1722171576000000000,
end: 1722262800000000000,
limit: 5,
offset: 0,
},
expectedTimestamps: []int64{1722259300000000000, 1722259400000000000, 1722253000000000000, 1722237700000000000},
},
{
name: "query with offset",
queryResponses: []queryResponse{
{
expectedQuery: ".*(timestamp >= '1722259200000000000' AND timestamp <= '1722262800000000000').* DESC LIMIT 7",
timestamps: []uint64{1722259210000000000, 1722259220000000000, 1722259230000000000},
},
{
expectedQuery: ".*(timestamp >= '1722252000000000000' AND timestamp <= '1722259200000000000').* DESC LIMIT 4",
timestamps: []uint64{1722253000000000000, 1722254000000000000, 1722255000000000000},
},
{
expectedQuery: ".*(timestamp >= '1722237600000000000' AND timestamp <= '1722252000000000000').* DESC LIMIT 1",
timestamps: []uint64{1722237700000000000},
},
},
queryParams: queryParams{
start: 1722171576000000000,
end: 1722262800000000000,
limit: 4,
offset: 3,
},
expectedTimestamps: []int64{1722253000000000000, 1722254000000000000, 1722255000000000000, 1722237700000000000},
},
{
name: "query with offset and limit- data spread across multiple windows",
queryResponses: []queryResponse{
{
expectedQuery: ".*(timestamp >= '1722259200000000000' AND timestamp <= '1722262800000000000').* DESC LIMIT 11",
timestamps: []uint64{},
},
{
expectedQuery: ".*(timestamp >= '1722252000000000000' AND timestamp <= '1722259200000000000').* DESC LIMIT 11",
timestamps: []uint64{1722253000000000000, 1722254000000000000, 1722255000000000000},
},
{
expectedQuery: ".*(timestamp >= '1722237600000000000' AND timestamp <= '1722252000000000000').* DESC LIMIT 8",
timestamps: []uint64{1722237700000000000, 1722237800000000000, 1722237900000000000, 1722237910000000000, 1722237920000000000},
},
{
expectedQuery: ".*(timestamp >= '1722208800000000000' AND timestamp <= '1722237600000000000').* DESC LIMIT 3",
timestamps: []uint64{1722208810000000000, 1722208820000000000, 1722208830000000000},
},
},
queryParams: queryParams{
start: 1722171576000000000,
end: 1722262800000000000,
limit: 5,
offset: 6,
},
expectedTimestamps: []int64{1722237910000000000, 1722237920000000000, 1722208810000000000, 1722208820000000000, 1722208830000000000},
},
{
name: "don't allow pagination to get more than 10k spans",
queryResponses: []queryResponse{},
queryParams: queryParams{
start: 1722171576000000000,
end: 1722262800000000000,
limit: 10,
offset: 9991,
},
expectedError: true,
},
}
cols := []cmock.ColumnType{
{Name: "timestamp", Type: "UInt64"},
{Name: "name", Type: "String"},
}
testName := "name"
options := clickhouseReader.NewOptions("", 0, 0, 0, "", "archiveNamespace")
// iterate over test data, create reader and run test
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
// Setup mock
mock, err := cmock.NewClickHouseWithQueryMatcher(nil, &regexMatcher{})
require.NoError(t, err, "Failed to create ClickHouse mock")
// Configure mock responses
for _, response := range tc.queryResponses {
values := make([][]any, 0, len(response.timestamps))
for _, ts := range response.timestamps {
ts := ts // copy so each row gets a distinct pointer (the range variable is reused before Go 1.22)
values = append(values, []any{&ts, &testName})
}
mock.ExpectQuery(response.expectedQuery).WillReturnRows(
cmock.NewRows(cols, values),
)
}
// Create reader and querier
reader := clickhouseReader.NewReaderFromClickhouseConnection(
mock,
options,
nil,
"",
featureManager.StartManager(),
"",
true,
)
q := &querier{
reader: reader,
builder: queryBuilder.NewQueryBuilder(
queryBuilder.QueryBuilderOptions{
BuildTraceQuery: tracesV3.PrepareTracesQuery,
},
featureManager.StartManager(),
),
}
// Update query parameters
params.Start = tc.queryParams.start
params.End = tc.queryParams.end
params.CompositeQuery.BuilderQueries["A"].Limit = tc.queryParams.limit
params.CompositeQuery.BuilderQueries["A"].Offset = tc.queryParams.offset
// Execute query
results, errMap, err := q.runWindowBasedListQuery(context.Background(), params, tsRanges)
if tc.expectedError {
require.Error(t, err)
return
}
// Assertions
require.NoError(t, err, "Query execution failed")
require.Nil(t, errMap, "Unexpected error map in results")
require.Len(t, results, 1, "Expected exactly one result set")
result := results[0]
require.Equal(t, "A", result.QueryName, "Incorrect query name in results")
require.Len(t, result.List, len(tc.expectedTimestamps),
"Result count mismatch: got %d results, expected %d",
len(result.List), len(tc.expectedTimestamps))
for i, expected := range tc.expectedTimestamps {
require.Equal(t, expected, result.List[i].Timestamp.UnixNano(),
"Timestamp mismatch at index %d: got %d, expected %d",
i, result.List[i].Timestamp.UnixNano(), expected)
}
// Verify mock expectations
err = mock.ExpectationsWereMet()
require.NoError(t, err, "Mock expectations were not met")
})
}
}
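
The offset cases above pin down the bookkeeping runWindowBasedListQuery is expected to perform: each window is asked for the outstanding limit plus the still-unconsumed offset, rows swallowed by the offset are discarded, and the walk stops once the limit is filled (or errors out up front when offset+limit would page past 10k spans, as the last case asserts). Here is a sketch of that loop, inferred from the asserted LIMIT sequences rather than copied from querier.go:

// paginateSketch reproduces the LIMIT values asserted above: `need` is what is
// still owed (limit + remaining offset, minus rows already kept); the offset
// consumes rows first, and the walk stops once the limit is satisfied.
func paginateSketch(windows [][]int64, limit, offset uint64) []int64 {
    var out []int64
    for _, rows := range windows {
        need := limit + offset - uint64(len(out))
        if uint64(len(rows)) > need {
            rows = rows[:need]
        }
        if uint64(len(rows)) <= offset {
            offset -= uint64(len(rows)) // whole window consumed by the offset
            continue
        }
        rows, offset = rows[offset:], 0
        out = append(out, rows...)
        if uint64(len(out)) >= limit {
            break
        }
    }
    return out
}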

View File

@@ -384,6 +384,11 @@ func LogCommentEnricher(next http.Handler) http.Handler {
client = "api"
}
email, err := auth.GetEmailFromJwt(r.Context())
if err != nil {
zap.S().Errorf("error while getting email from jwt: %v", err)
}
kvs := map[string]string{
"path": path,
"dashboardID": dashboardID,
@@ -392,6 +397,7 @@ func LogCommentEnricher(next http.Handler) http.Handler {
"client": client,
"viewName": viewName,
"servicesTab": tab,
"email": email,
}
r = r.WithContext(context.WithValue(r.Context(), common.LogCommentKey, kvs))
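
For reference, a downstream consumer can read the enriched comment back out of the request context; this is a sketch using the key and map type stored above, with illustrative variable names:

if kvs, ok := r.Context().Value(common.LogCommentKey).(map[string]string); ok {
    email := kvs["email"] // empty when GetEmailFromJwt failed above
    _ = email
}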

View File

@@ -10,7 +10,7 @@ import (
"go.signoz.io/signoz/pkg/query-service/utils"
)
var aggregateOperatorToPercentile = map[v3.AggregateOperator]float64{
var AggregateOperatorToPercentile = map[v3.AggregateOperator]float64{
v3.AggregateOperatorP05: 0.05,
v3.AggregateOperatorP10: 0.10,
v3.AggregateOperatorP20: 0.20,
@@ -22,7 +22,7 @@ var aggregateOperatorToPercentile = map[v3.AggregateOperator]float64{
v3.AggregateOperatorP99: 0.99,
}
var aggregateOperatorToSQLFunc = map[v3.AggregateOperator]string{
var AggregateOperatorToSQLFunc = map[v3.AggregateOperator]string{
v3.AggregateOperatorAvg: "avg",
v3.AggregateOperatorMax: "max",
v3.AggregateOperatorMin: "min",
@@ -109,7 +109,7 @@ func getSelectLabels(aggregatorOperator v3.AggregateOperator, groupBy []v3.Attri
return selectLabels
}
func getSelectKeys(aggregatorOperator v3.AggregateOperator, groupBy []v3.AttributeKey) string {
func GetSelectKeys(aggregatorOperator v3.AggregateOperator, groupBy []v3.AttributeKey) string {
var selectLabels []string
if aggregatorOperator == v3.AggregateOperatorNoOp {
return ""
@@ -173,7 +173,7 @@ func buildTracesFilterQuery(fs *v3.FilterSet) (string, error) {
conditions = append(conditions, fmt.Sprintf(operator, columnName, fmtVal))
case v3.FilterOperatorExists, v3.FilterOperatorNotExists:
if item.Key.IsColumn {
subQuery, err := existsSubQueryForFixedColumn(item.Key, item.Operator)
subQuery, err := ExistsSubQueryForFixedColumn(item.Key, item.Operator)
if err != nil {
return "", err
}
@@ -199,7 +199,7 @@ func buildTracesFilterQuery(fs *v3.FilterSet) (string, error) {
return queryString, nil
}
func existsSubQueryForFixedColumn(key v3.AttributeKey, op v3.FilterOperator) (string, error) {
func ExistsSubQueryForFixedColumn(key v3.AttributeKey, op v3.FilterOperator) (string, error) {
if key.DataType == v3.AttributeKeyDataTypeString {
if op == v3.FilterOperatorExists {
return fmt.Sprintf("%s %s ''", key.Key, tracesOperatorMappingV3[v3.FilterOperatorNotEqual]), nil
@@ -244,7 +244,7 @@ func buildTracesQuery(start, end, step int64, mq *v3.BuilderQuery, _ string, pan
selectLabels := getSelectLabels(mq.AggregateOperator, mq.GroupBy)
having := having(mq.Having)
having := Having(mq.Having)
if having != "" {
having = " having " + having
}
@@ -272,7 +272,7 @@ func buildTracesQuery(start, end, step int64, mq *v3.BuilderQuery, _ string, pan
// we don't need value for first query
if options.GraphLimitQtype == constants.FirstQueryGraphLimit {
queryTmpl = "SELECT " + getSelectKeys(mq.AggregateOperator, mq.GroupBy) + " from (" + queryTmpl + ")"
queryTmpl = "SELECT " + GetSelectKeys(mq.AggregateOperator, mq.GroupBy) + " from (" + queryTmpl + ")"
}
emptyValuesInGroupByFilter, err := handleEmptyValuesInGroupBy(mq.GroupBy)
@@ -281,7 +281,7 @@ func buildTracesQuery(start, end, step int64, mq *v3.BuilderQuery, _ string, pan
}
filterSubQuery += emptyValuesInGroupByFilter
groupBy := groupByAttributeKeyTags(panelType, options.GraphLimitQtype, mq.GroupBy...)
groupBy := GroupByAttributeKeyTags(panelType, options.GraphLimitQtype, mq.GroupBy...)
if groupBy != "" {
groupBy = " group by " + groupBy
}
@@ -291,7 +291,7 @@ func buildTracesQuery(start, end, step int64, mq *v3.BuilderQuery, _ string, pan
}
if options.GraphLimitQtype == constants.SecondQueryGraphLimit {
filterSubQuery = filterSubQuery + " AND " + fmt.Sprintf("(%s) GLOBAL IN (", getSelectKeys(mq.AggregateOperator, mq.GroupBy)) + "%s)"
filterSubQuery = filterSubQuery + " AND " + fmt.Sprintf("(%s) GLOBAL IN (", GetSelectKeys(mq.AggregateOperator, mq.GroupBy)) + "%s)"
}
aggregationKey := ""
@@ -311,7 +311,7 @@ func buildTracesQuery(start, end, step int64, mq *v3.BuilderQuery, _ string, pan
rate = rate / 60.0
}
op := fmt.Sprintf("%s(%s)/%f", aggregateOperatorToSQLFunc[mq.AggregateOperator], aggregationKey, rate)
op := fmt.Sprintf("%s(%s)/%f", AggregateOperatorToSQLFunc[mq.AggregateOperator], aggregationKey, rate)
query := fmt.Sprintf(queryTmpl, op, filterSubQuery, groupBy, having, orderBy)
return query, nil
case
@@ -324,17 +324,17 @@ func buildTracesQuery(start, end, step int64, mq *v3.BuilderQuery, _ string, pan
v3.AggregateOperatorP90,
v3.AggregateOperatorP95,
v3.AggregateOperatorP99:
op := fmt.Sprintf("quantile(%v)(%s)", aggregateOperatorToPercentile[mq.AggregateOperator], aggregationKey)
op := fmt.Sprintf("quantile(%v)(%s)", AggregateOperatorToPercentile[mq.AggregateOperator], aggregationKey)
query := fmt.Sprintf(queryTmpl, op, filterSubQuery, groupBy, having, orderBy)
return query, nil
case v3.AggregateOperatorAvg, v3.AggregateOperatorSum, v3.AggregateOperatorMin, v3.AggregateOperatorMax:
op := fmt.Sprintf("%s(%s)", aggregateOperatorToSQLFunc[mq.AggregateOperator], aggregationKey)
op := fmt.Sprintf("%s(%s)", AggregateOperatorToSQLFunc[mq.AggregateOperator], aggregationKey)
query := fmt.Sprintf(queryTmpl, op, filterSubQuery, groupBy, having, orderBy)
return query, nil
case v3.AggregateOperatorCount:
if mq.AggregateAttribute.Key != "" {
if mq.AggregateAttribute.IsColumn {
subQuery, err := existsSubQueryForFixedColumn(mq.AggregateAttribute, v3.FilterOperatorExists)
subQuery, err := ExistsSubQueryForFixedColumn(mq.AggregateAttribute, v3.FilterOperatorExists)
if err == nil {
filterSubQuery = fmt.Sprintf("%s AND %s", filterSubQuery, subQuery)
}
@@ -354,9 +354,9 @@ func buildTracesQuery(start, end, step int64, mq *v3.BuilderQuery, _ string, pan
var query string
if panelType == v3.PanelTypeTrace {
withSubQuery := fmt.Sprintf(constants.TracesExplorerViewSQLSelectWithSubQuery, constants.SIGNOZ_TRACE_DBNAME, constants.SIGNOZ_SPAN_INDEX_LOCAL_TABLENAME, spanIndexTableTimeFilter, filterSubQuery)
withSubQuery = addLimitToQuery(withSubQuery, mq.Limit)
withSubQuery = AddLimitToQuery(withSubQuery, mq.Limit)
if mq.Offset != 0 {
withSubQuery = addOffsetToQuery(withSubQuery, mq.Offset)
withSubQuery = AddOffsetToQuery(withSubQuery, mq.Offset)
}
// query = withSubQuery + ") " + fmt.Sprintf(constants.TracesExplorerViewSQLSelectQuery, constants.SIGNOZ_TRACE_DBNAME, constants.SIGNOZ_SPAN_INDEX_TABLENAME, constants.SIGNOZ_SPAN_INDEX_TABLENAME)
query = fmt.Sprintf(constants.TracesExplorerViewSQLSelectBeforeSubQuery, constants.SIGNOZ_TRACE_DBNAME, constants.SIGNOZ_SPAN_INDEX_TABLENAME) + withSubQuery + ") " + fmt.Sprintf(constants.TracesExplorerViewSQLSelectAfterSubQuery, constants.SIGNOZ_TRACE_DBNAME, constants.SIGNOZ_SPAN_INDEX_TABLENAME, spanIndexTableTimeFilter)
@@ -403,7 +403,7 @@ func groupBy(panelType v3.PanelType, graphLimitQtype string, tags ...string) str
return strings.Join(tags, ",")
}
func groupByAttributeKeyTags(panelType v3.PanelType, graphLimitQtype string, tags ...v3.AttributeKey) string {
func GroupByAttributeKeyTags(panelType v3.PanelType, graphLimitQtype string, tags ...v3.AttributeKey) string {
groupTags := []string{}
for _, tag := range tags {
groupTags = append(groupTags, fmt.Sprintf("`%s`", tag.Key))
@@ -456,7 +456,7 @@ func orderByAttributeKeyTags(panelType v3.PanelType, items []v3.OrderBy, tags []
return str
}
func having(items []v3.Having) string {
func Having(items []v3.Having) string {
// aggregate something and filter on that aggregate
var having []string
for _, item := range items {
@@ -465,7 +465,7 @@ func having(items []v3.Having) string {
return strings.Join(having, " AND ")
}
func reduceToQuery(query string, reduceTo v3.ReduceToOperator, _ v3.AggregateOperator) (string, error) {
func ReduceToQuery(query string, reduceTo v3.ReduceToOperator, _ v3.AggregateOperator) (string, error) {
var groupBy string
switch reduceTo {
@@ -485,14 +485,14 @@ func reduceToQuery(query string, reduceTo v3.ReduceToOperator, _ v3.AggregateOpe
return query, nil
}
func addLimitToQuery(query string, limit uint64) string {
func AddLimitToQuery(query string, limit uint64) string {
if limit == 0 {
limit = 100
}
return fmt.Sprintf("%s LIMIT %d", query, limit)
}
func addOffsetToQuery(query string, offset uint64) string {
func AddOffsetToQuery(query string, offset uint64) string {
return fmt.Sprintf("%s OFFSET %d", query, offset)
}
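
The rename to exported identifiers changes no behaviour; for illustration, assuming the two functions above:

AddLimitToQuery("SELECT 1", 0)           // "SELECT 1 LIMIT 100" (limit 0 falls back to the default of 100)
AddOffsetToQuery("SELECT 1 LIMIT 5", 10) // "SELECT 1 LIMIT 5 OFFSET 10"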
@@ -513,7 +513,7 @@ func PrepareTracesQuery(start, end int64, panelType v3.PanelType, mq *v3.Builder
if err != nil {
return "", err
}
query = addLimitToQuery(query, mq.Limit)
query = AddLimitToQuery(query, mq.Limit)
return query, nil
} else if options.GraphLimitQtype == constants.SecondQueryGraphLimit {
@@ -529,13 +529,13 @@ func PrepareTracesQuery(start, end int64, panelType v3.PanelType, mq *v3.Builder
return "", err
}
if panelType == v3.PanelTypeValue {
query, err = reduceToQuery(query, mq.ReduceTo, mq.AggregateOperator)
query, err = ReduceToQuery(query, mq.ReduceTo, mq.AggregateOperator)
}
if panelType == v3.PanelTypeList || panelType == v3.PanelTypeTable {
query = addLimitToQuery(query, mq.Limit)
query = AddLimitToQuery(query, mq.Limit)
if mq.Offset != 0 {
query = addOffsetToQuery(query, mq.Offset)
query = AddOffsetToQuery(query, mq.Offset)
}
}
return query, err

View File

@@ -0,0 +1,118 @@
package v4
import (
"go.signoz.io/signoz/pkg/query-service/constants"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
"go.signoz.io/signoz/pkg/query-service/utils"
)
// if the field is timestamp/id/value we don't need to enrich
// if the field is static we don't need to enrich
// for all others we need to enrich
// an attribute/resource can be materialized/dematerialized
// but the query should work regardless and shouldn't fail
func isEnriched(field v3.AttributeKey) bool {
// if it is timestamp/id, don't check
if field.Key == "timestamp" || field.Key == constants.SigNozOrderByValue {
return true
}
// we need to check if the field is static and return false if isColumn is not set
if _, ok := constants.StaticFieldsTraces[field.Key]; ok && field.IsColumn {
return true
}
return false
}
func enrichKeyWithMetadata(key v3.AttributeKey, keys map[string]v3.AttributeKey) v3.AttributeKey {
if isEnriched(key) {
return key
}
if v, ok := constants.StaticFieldsTraces[key.Key]; ok {
return v
}
for _, key := range utils.GenerateEnrichmentKeys(key) {
if val, ok := keys[key]; ok {
return val
}
}
// enrich with default values if metadata is not found
if key.Type == "" {
key.Type = v3.AttributeKeyTypeTag
}
if key.DataType == "" {
key.DataType = v3.AttributeKeyDataTypeString
}
return key
}
func Enrich(params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) {
if params.CompositeQuery.QueryType != v3.QueryTypeBuilder {
return
}
for _, query := range params.CompositeQuery.BuilderQueries {
if query.DataSource == v3.DataSourceTraces {
EnrichTracesQuery(query, keys)
}
}
}
func EnrichTracesQuery(query *v3.BuilderQuery, keys map[string]v3.AttributeKey) {
// enrich aggregate attribute
query.AggregateAttribute = enrichKeyWithMetadata(query.AggregateAttribute, keys)
// enrich filter items
if query.Filters != nil && len(query.Filters.Items) > 0 {
for idx, filter := range query.Filters.Items {
query.Filters.Items[idx].Key = enrichKeyWithMetadata(filter.Key, keys)
// if the serviceName column is used, use the corresponding resource attribute as well during filtering
// since there is only one of these resource attributes we are adding it here directly.
// move it somewhere else if this list is big
if filter.Key.Key == "serviceName" {
query.Filters.Items[idx].Key = v3.AttributeKey{
Key: "service.name",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeResource,
IsColumn: false,
}
}
}
}
// enrich group by
for idx, groupBy := range query.GroupBy {
query.GroupBy[idx] = enrichKeyWithMetadata(groupBy, keys)
}
// enrich order by
query.OrderBy = enrichOrderBy(query.OrderBy, keys)
// enrich select columns
for idx, selectColumn := range query.SelectColumns {
query.SelectColumns[idx] = enrichKeyWithMetadata(selectColumn, keys)
}
}
func enrichOrderBy(items []v3.OrderBy, keys map[string]v3.AttributeKey) []v3.OrderBy {
enrichedItems := []v3.OrderBy{}
for i := 0; i < len(items); i++ {
attributeKey := enrichKeyWithMetadata(v3.AttributeKey{
Key: items[i].ColumnName,
}, keys)
enrichedItems = append(enrichedItems, v3.OrderBy{
ColumnName: items[i].ColumnName,
Order: items[i].Order,
Key: attributeKey.Key,
DataType: attributeKey.DataType,
Type: attributeKey.Type,
IsColumn: attributeKey.IsColumn,
})
}
return enrichedItems
}
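
A small usage sketch of the fallback path, mirroring the "enrich default values" test in the next file: a key with no metadata match is defaulted to a string tag.

k := enrichKeyWithMetadata(v3.AttributeKey{Key: "testattr"}, map[string]v3.AttributeKey{})
// k.Type == v3.AttributeKeyTypeTag and k.DataType == v3.AttributeKeyDataTypeString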

View File

@@ -0,0 +1,196 @@
package v4
import (
"reflect"
"testing"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)
func TestEnrichTracesQuery(t *testing.T) {
type args struct {
query *v3.BuilderQuery
keys map[string]v3.AttributeKey
want *v3.BuilderQuery
}
tests := []struct {
name string
args args
}{
{
name: "test 1",
args: args{
query: &v3.BuilderQuery{
Filters: &v3.FilterSet{
Operator: "AND",
Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "bytes", Type: v3.AttributeKeyTypeTag}, Value: 100, Operator: ">"},
},
},
OrderBy: []v3.OrderBy{},
},
keys: map[string]v3.AttributeKey{
"bytes##tag##int64": {Key: "bytes", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag},
},
want: &v3.BuilderQuery{
Filters: &v3.FilterSet{
Operator: "AND",
Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "bytes", Type: v3.AttributeKeyTypeTag, DataType: v3.AttributeKeyDataTypeInt64}, Value: 100, Operator: ">"},
},
},
OrderBy: []v3.OrderBy{},
},
},
},
{
name: "test service name",
args: args{
query: &v3.BuilderQuery{
Filters: &v3.FilterSet{
Operator: "AND",
Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "serviceName", DataType: v3.AttributeKeyDataTypeString, IsColumn: true}, Value: "myservice", Operator: "="},
{Key: v3.AttributeKey{Key: "serviceName"}, Value: "myservice", Operator: "="},
},
},
OrderBy: []v3.OrderBy{},
},
keys: map[string]v3.AttributeKey{},
want: &v3.BuilderQuery{
Filters: &v3.FilterSet{
Operator: "AND",
Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "service.name", Type: v3.AttributeKeyTypeResource, DataType: v3.AttributeKeyDataTypeString}, Value: "myservice", Operator: "="},
{Key: v3.AttributeKey{Key: "service.name", Type: v3.AttributeKeyTypeResource, DataType: v3.AttributeKeyDataTypeString}, Value: "myservice", Operator: "="},
},
},
OrderBy: []v3.OrderBy{},
},
},
},
{
name: "test mat attrs",
args: args{
query: &v3.BuilderQuery{
Filters: &v3.FilterSet{
Operator: "AND",
Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "http.route", DataType: v3.AttributeKeyDataTypeString, IsColumn: true}, Value: "/api", Operator: "="},
{Key: v3.AttributeKey{Key: "msgSystem"}, Value: "name", Operator: "="},
{Key: v3.AttributeKey{Key: "external_http_url"}, Value: "name", Operator: "="},
},
},
OrderBy: []v3.OrderBy{},
},
keys: map[string]v3.AttributeKey{},
want: &v3.BuilderQuery{
Filters: &v3.FilterSet{
Operator: "AND",
Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "http.route", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}, Value: "/api", Operator: "="},
{Key: v3.AttributeKey{Key: "msgSystem", DataType: v3.AttributeKeyDataTypeString, IsColumn: true}, Value: "name", Operator: "="},
{Key: v3.AttributeKey{Key: "external_http_url", DataType: v3.AttributeKeyDataTypeString, IsColumn: true}, Value: "name", Operator: "="},
},
},
OrderBy: []v3.OrderBy{},
},
},
},
{
name: "test aggregateattr, filter, groupby, order by",
args: args{
query: &v3.BuilderQuery{
AggregateOperator: v3.AggregateOperatorCount,
AggregateAttribute: v3.AttributeKey{
Key: "http.route",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeTag,
},
Filters: &v3.FilterSet{
Operator: "AND",
Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "http.route", DataType: v3.AttributeKeyDataTypeString}, Value: "/api", Operator: "="},
},
},
GroupBy: []v3.AttributeKey{
{Key: "http.route", DataType: v3.AttributeKeyDataTypeString},
{Key: "msgSystem", DataType: v3.AttributeKeyDataTypeString},
},
OrderBy: []v3.OrderBy{
{ColumnName: "httpRoute", Order: v3.DirectionAsc},
},
},
keys: map[string]v3.AttributeKey{
"http.route##tag##string": {Key: "http.route", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true},
},
want: &v3.BuilderQuery{
AggregateAttribute: v3.AttributeKey{
Key: "http.route",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeTag,
IsColumn: true,
},
Filters: &v3.FilterSet{
Operator: "AND",
Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "http.route", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}, Value: "/api", Operator: "="},
},
},
GroupBy: []v3.AttributeKey{
{Key: "http.route", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true},
{Key: "msgSystem", DataType: v3.AttributeKeyDataTypeString, IsJSON: false, IsColumn: true},
},
OrderBy: []v3.OrderBy{
{Key: "httpRoute", Order: v3.DirectionAsc, ColumnName: "httpRoute", DataType: v3.AttributeKeyDataTypeString, IsColumn: true},
},
},
},
},
{
name: "enrich default values",
args: args{
query: &v3.BuilderQuery{
Filters: &v3.FilterSet{
Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "testattr"}},
},
},
OrderBy: []v3.OrderBy{{ColumnName: "timestamp", Order: v3.DirectionAsc}},
},
keys: map[string]v3.AttributeKey{},
want: &v3.BuilderQuery{
Filters: &v3.FilterSet{
Items: []v3.FilterItem{{Key: v3.AttributeKey{Key: "testattr", Type: v3.AttributeKeyTypeTag, DataType: v3.AttributeKeyDataTypeString}}},
},
// isColumn won't matter in timestamp as it will always be a column
OrderBy: []v3.OrderBy{{Key: "timestamp", Order: v3.DirectionAsc, ColumnName: "timestamp"}},
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
EnrichTracesQuery(tt.args.query, tt.args.keys)
// Check AggregateAttribute
if tt.args.query.AggregateAttribute.Key != "" && !reflect.DeepEqual(tt.args.query.AggregateAttribute, tt.args.want.AggregateAttribute) {
t.Errorf("EnrichTracesQuery() AggregateAttribute = %v, want %v", tt.args.query.AggregateAttribute, tt.args.want.AggregateAttribute)
}
// Check Filters
if tt.args.query.Filters != nil && !reflect.DeepEqual(tt.args.query.Filters, tt.args.want.Filters) {
t.Errorf("EnrichTracesQuery() Filters = %v, want %v", tt.args.query.Filters, tt.args.want.Filters)
}
// Check GroupBy
if tt.args.query.GroupBy != nil && !reflect.DeepEqual(tt.args.query.GroupBy, tt.args.want.GroupBy) {
t.Errorf("EnrichTracesQuery() GroupBy = %v, want %v", tt.args.query.GroupBy, tt.args.want.GroupBy)
}
// Check OrderBy
if tt.args.query.OrderBy != nil && !reflect.DeepEqual(tt.args.query.OrderBy, tt.args.want.OrderBy) {
t.Errorf("EnrichTracesQuery() OrderBy = %v, want %v", tt.args.query.OrderBy, tt.args.want.OrderBy)
}
})
}
}

View File

@@ -0,0 +1,414 @@
package v4
import (
"fmt"
"strings"
"go.signoz.io/signoz/pkg/query-service/app/resource"
tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
"go.signoz.io/signoz/pkg/query-service/constants"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
"go.signoz.io/signoz/pkg/query-service/utils"
)
const NANOSECOND = 1000000000
var tracesOperatorMappingV3 = map[v3.FilterOperator]string{
v3.FilterOperatorIn: "IN",
v3.FilterOperatorNotIn: "NOT IN",
v3.FilterOperatorEqual: "=",
v3.FilterOperatorNotEqual: "!=",
v3.FilterOperatorLessThan: "<",
v3.FilterOperatorLessThanOrEq: "<=",
v3.FilterOperatorGreaterThan: ">",
v3.FilterOperatorGreaterThanOrEq: ">=",
v3.FilterOperatorLike: "ILIKE",
v3.FilterOperatorNotLike: "NOT ILIKE",
v3.FilterOperatorRegex: "match(%s, %s)",
v3.FilterOperatorNotRegex: "NOT match(%s, %s)",
v3.FilterOperatorContains: "ILIKE",
v3.FilterOperatorNotContains: "NOT ILIKE",
v3.FilterOperatorExists: "mapContains(%s, '%s')",
v3.FilterOperatorNotExists: "NOT mapContains(%s, '%s')",
}
func getClickHouseTracesColumnType(columnType v3.AttributeKeyType) string {
if columnType == v3.AttributeKeyTypeResource {
return "resources"
}
return "attributes"
}
func getClickHouseTracesColumnDataType(columnDataType v3.AttributeKeyDataType) string {
if columnDataType == v3.AttributeKeyDataTypeFloat64 || columnDataType == v3.AttributeKeyDataTypeInt64 {
return "number"
}
if columnDataType == v3.AttributeKeyDataTypeBool {
return "bool"
}
return "string"
}
func getColumnName(key v3.AttributeKey) string {
// if the key is present in the static fields, return it as is
if _, ok := constants.StaticFieldsTraces[key.Key]; ok {
return key.Key
}
if !key.IsColumn {
keyType := getClickHouseTracesColumnType(key.Type)
keyDType := getClickHouseTracesColumnDataType(key.DataType)
return fmt.Sprintf("%s_%s['%s']", keyType, keyDType, key.Key)
}
return "`" + utils.GetClickhouseColumnNameV2(string(key.Type), string(key.DataType), key.Key) + "`"
}
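// For example (cases mirrored from Test_getColumnName below):
//   {Key: "data", Type: tag, DataType: string}                 -> attributes_string['data']
//   {Key: "data", Type: tag, DataType: string, IsColumn: true} -> `attribute_string_data`
//   {Key: "spanKind", IsColumn: true} (static field)           -> spanKind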
// getSelectLabels returns the select labels for the query based on groupBy and aggregateOperator
func getSelectLabels(groupBy []v3.AttributeKey) string {
var labels []string
for _, tag := range groupBy {
name := getColumnName(tag)
labels = append(labels, fmt.Sprintf(" %s as `%s`", name, tag.Key))
}
return strings.Join(labels, ",")
}
func buildTracesFilterQuery(fs *v3.FilterSet) (string, error) {
var conditions []string
if fs != nil && len(fs.Items) != 0 {
for _, item := range fs.Items {
// skip if it's a resource attribute
if item.Key.Type == v3.AttributeKeyTypeResource {
continue
}
val := item.Value
// generate the key
columnName := getColumnName(item.Key)
var fmtVal string
item.Operator = v3.FilterOperator(strings.ToLower(strings.TrimSpace(string(item.Operator))))
if item.Operator != v3.FilterOperatorExists && item.Operator != v3.FilterOperatorNotExists {
var err error
val, err = utils.ValidateAndCastValue(val, item.Key.DataType)
if err != nil {
return "", fmt.Errorf("invalid value for key %s: %v", item.Key.Key, err)
}
}
if val != nil {
fmtVal = utils.ClickHouseFormattedValue(val)
}
if operator, ok := tracesOperatorMappingV3[item.Operator]; ok {
switch item.Operator {
case v3.FilterOperatorContains, v3.FilterOperatorNotContains:
// we also want to treat %, _ as literals for contains
val := utils.QuoteEscapedStringForContains(fmt.Sprintf("%s", item.Value), false)
conditions = append(conditions, fmt.Sprintf("%s %s '%%%s%%'", columnName, operator, val))
case v3.FilterOperatorRegex, v3.FilterOperatorNotRegex:
conditions = append(conditions, fmt.Sprintf(operator, columnName, fmtVal))
case v3.FilterOperatorExists, v3.FilterOperatorNotExists:
if item.Key.IsColumn {
subQuery, err := tracesV3.ExistsSubQueryForFixedColumn(item.Key, item.Operator)
if err != nil {
return "", err
}
conditions = append(conditions, subQuery)
} else {
cType := getClickHouseTracesColumnType(item.Key.Type)
cDataType := getClickHouseTracesColumnDataType(item.Key.DataType)
col := fmt.Sprintf("%s_%s", cType, cDataType)
conditions = append(conditions, fmt.Sprintf(operator, col, item.Key.Key))
}
default:
conditions = append(conditions, fmt.Sprintf("%s %s %s", columnName, operator, fmtVal))
}
} else {
return "", fmt.Errorf("unsupported operator %s", item.Operator)
}
}
}
queryString := strings.Join(conditions, " AND ")
return queryString, nil
}
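// For example (mirroring Test_buildTracesFilterQuery below):
//   method IN ["GET","POST"] (string tag)  -> attributes_string['method'] IN ['GET','POST']
//   host EXISTS (string tag, not a column) -> mapContains(attributes_string, 'host')
//   resource-typed items are skipped here and handled by the resource sub-query instead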
func handleEmptyValuesInGroupBy(groupBy []v3.AttributeKey) (string, error) {
// TODO(nitya): handle user-defined materialized columns here when we support them in future;
// skipping for now as we don't support creating them
filterItems := []v3.FilterItem{}
if len(groupBy) != 0 {
for _, item := range groupBy {
if !item.IsColumn {
filterItems = append(filterItems, v3.FilterItem{
Key: item,
Operator: v3.FilterOperatorExists,
})
}
}
}
if len(filterItems) != 0 {
filterSet := v3.FilterSet{
Operator: "AND",
Items: filterItems,
}
return buildTracesFilterQuery(&filterSet)
}
return "", nil
}
// orderBy returns a string of comma separated tags for order by clause
// if there are remaining items which are not present in tags they are also added
// if the order is not specified, it defaults to ASC
func orderBy(panelType v3.PanelType, items []v3.OrderBy, tagLookup map[string]struct{}) []string {
var orderBy []string
for _, item := range items {
if item.ColumnName == constants.SigNozOrderByValue {
orderBy = append(orderBy, fmt.Sprintf("value %s", item.Order))
} else if _, ok := tagLookup[item.ColumnName]; ok {
orderBy = append(orderBy, fmt.Sprintf("`%s` %s", item.ColumnName, item.Order))
} else if panelType == v3.PanelTypeList {
attr := v3.AttributeKey{Key: item.ColumnName, DataType: item.DataType, Type: item.Type, IsColumn: item.IsColumn}
name := getColumnName(attr)
orderBy = append(orderBy, fmt.Sprintf("%s %s", name, item.Order))
}
}
return orderBy
}
func orderByAttributeKeyTags(panelType v3.PanelType, items []v3.OrderBy, tags []v3.AttributeKey) string {
tagLookup := map[string]struct{}{}
for _, v := range tags {
tagLookup[v.Key] = struct{}{}
}
orderByArray := orderBy(panelType, items, tagLookup)
if len(orderByArray) == 0 {
if panelType == v3.PanelTypeList {
orderByArray = append(orderByArray, constants.TIMESTAMP+" DESC")
} else {
orderByArray = append(orderByArray, "value DESC")
}
}
str := strings.Join(orderByArray, ",")
return str
}
func buildTracesQuery(start, end, step int64, mq *v3.BuilderQuery, panelType v3.PanelType, options v3.QBOptions) (string, error) {
tracesStart := utils.GetEpochNanoSecs(start)
tracesEnd := utils.GetEpochNanoSecs(end)
// 1800 seconds are subtracted so that the bucket start considers all the fingerprints.
bucketStart := tracesStart/NANOSECOND - 1800
bucketEnd := tracesEnd / NANOSECOND
timeFilter := fmt.Sprintf("(timestamp >= '%d' AND timestamp <= '%d') AND (ts_bucket_start >= %d AND ts_bucket_start <= %d)", tracesStart, tracesEnd, bucketStart, bucketEnd)
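// Worked example (values reused by the tests below): start = 1680066360726210000 ns
// gives bucketStart = 1680066360 - 1800 = 1680064560, and end = 1680066458000000000 ns
// gives bucketEnd = 1680066458, producing the clause
// "(ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458)".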
filterSubQuery, err := buildTracesFilterQuery(mq.Filters)
if err != nil {
return "", err
}
if filterSubQuery != "" {
filterSubQuery = " AND " + filterSubQuery
}
emptyValuesInGroupByFilter, err := handleEmptyValuesInGroupBy(mq.GroupBy)
if err != nil {
return "", err
}
if emptyValuesInGroupByFilter != "" {
filterSubQuery = filterSubQuery + " AND " + emptyValuesInGroupByFilter
}
resourceSubQuery, err := resource.BuildResourceSubQuery("signoz_traces", "distributed_traces_v3_resource", bucketStart, bucketEnd, mq.Filters, mq.GroupBy, mq.AggregateAttribute, false)
if err != nil {
return "", err
}
// join both the filter clauses
if resourceSubQuery != "" {
filterSubQuery = filterSubQuery + " AND (resource_fingerprint GLOBAL IN " + resourceSubQuery + ")"
}
// timerange will be sent in epoch millisecond
selectLabels := getSelectLabels(mq.GroupBy)
if selectLabels != "" {
selectLabels = selectLabels + ","
}
orderBy := orderByAttributeKeyTags(panelType, mq.OrderBy, mq.GroupBy)
if orderBy != "" {
orderBy = " order by " + orderBy
}
if mq.AggregateOperator == v3.AggregateOperatorNoOp {
var query string
if panelType == v3.PanelTypeTrace {
withSubQuery := fmt.Sprintf(constants.TracesExplorerViewSQLSelectWithSubQuery, constants.SIGNOZ_TRACE_DBNAME, constants.SIGNOZ_SPAN_INDEX_V3_LOCAL_TABLENAME, timeFilter, filterSubQuery)
withSubQuery = tracesV3.AddLimitToQuery(withSubQuery, mq.Limit)
if mq.Offset != 0 {
withSubQuery = tracesV3.AddOffsetToQuery(withSubQuery, mq.Offset)
}
query = fmt.Sprintf(constants.TracesExplorerViewSQLSelectBeforeSubQuery, constants.SIGNOZ_TRACE_DBNAME, constants.SIGNOZ_SPAN_INDEX_V3) + withSubQuery + ") " + fmt.Sprintf(constants.TracesExplorerViewSQLSelectAfterSubQuery, constants.SIGNOZ_TRACE_DBNAME, constants.SIGNOZ_SPAN_INDEX_V3, timeFilter)
} else if panelType == v3.PanelTypeList {
if len(mq.SelectColumns) == 0 {
return "", fmt.Errorf("select columns cannot be empty for panelType %s", panelType)
}
// add it to the select labels
selectLabels = getSelectLabels(mq.SelectColumns)
queryNoOpTmpl := fmt.Sprintf("SELECT timestamp as timestamp_datetime, spanID, traceID,%s ", selectLabels) + "from " + constants.SIGNOZ_TRACE_DBNAME + "." + constants.SIGNOZ_SPAN_INDEX_V3 + " where %s %s" + "%s"
query = fmt.Sprintf(queryNoOpTmpl, timeFilter, filterSubQuery, orderBy)
} else {
return "", fmt.Errorf("unsupported aggregate operator %s for panelType %s", mq.AggregateOperator, panelType)
}
return query, nil
// ---- NOOP ends here ----
}
having := tracesV3.Having(mq.Having)
if having != "" {
having = " having " + having
}
groupBy := tracesV3.GroupByAttributeKeyTags(panelType, options.GraphLimitQtype, mq.GroupBy...)
if groupBy != "" {
groupBy = " group by " + groupBy
}
aggregationKey := ""
if mq.AggregateAttribute.Key != "" {
aggregationKey = getColumnName(mq.AggregateAttribute)
}
var queryTmpl string
if options.GraphLimitQtype == constants.FirstQueryGraphLimit {
queryTmpl = "SELECT"
} else if panelType == v3.PanelTypeTable {
queryTmpl =
"SELECT "
} else if panelType == v3.PanelTypeGraph || panelType == v3.PanelTypeValue {
// Select the aggregate value for interval
queryTmpl =
fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d SECOND) AS ts,", step)
}
queryTmpl = queryTmpl + selectLabels +
" %s as value " +
"from " + constants.SIGNOZ_TRACE_DBNAME + "." + constants.SIGNOZ_SPAN_INDEX_V3 +
" where " + timeFilter + "%s" +
"%s%s" +
"%s"
// we don't need value for first query
if options.GraphLimitQtype == constants.FirstQueryGraphLimit {
queryTmpl = "SELECT " + tracesV3.GetSelectKeys(mq.AggregateOperator, mq.GroupBy) + " from (" + queryTmpl + ")"
}
if options.GraphLimitQtype == constants.SecondQueryGraphLimit {
filterSubQuery = filterSubQuery + " AND " + fmt.Sprintf("(%s) GLOBAL IN (", tracesV3.GetSelectKeys(mq.AggregateOperator, mq.GroupBy)) + "%s)"
}
switch mq.AggregateOperator {
case v3.AggregateOperatorRateSum,
v3.AggregateOperatorRateMax,
v3.AggregateOperatorRateAvg,
v3.AggregateOperatorRateMin,
v3.AggregateOperatorRate:
rate := float64(step)
if options.PreferRPM {
rate = rate / 60.0
}
op := fmt.Sprintf("%s(%s)/%f", tracesV3.AggregateOperatorToSQLFunc[mq.AggregateOperator], aggregationKey, rate)
query := fmt.Sprintf(queryTmpl, op, filterSubQuery, groupBy, having, orderBy)
return query, nil
case
v3.AggregateOperatorP05,
v3.AggregateOperatorP10,
v3.AggregateOperatorP20,
v3.AggregateOperatorP25,
v3.AggregateOperatorP50,
v3.AggregateOperatorP75,
v3.AggregateOperatorP90,
v3.AggregateOperatorP95,
v3.AggregateOperatorP99:
op := fmt.Sprintf("quantile(%v)(%s)", tracesV3.AggregateOperatorToPercentile[mq.AggregateOperator], aggregationKey)
query := fmt.Sprintf(queryTmpl, op, filterSubQuery, groupBy, having, orderBy)
return query, nil
case v3.AggregateOperatorAvg, v3.AggregateOperatorSum, v3.AggregateOperatorMin, v3.AggregateOperatorMax:
op := fmt.Sprintf("%s(%s)", tracesV3.AggregateOperatorToSQLFunc[mq.AggregateOperator], aggregationKey)
query := fmt.Sprintf(queryTmpl, op, filterSubQuery, groupBy, having, orderBy)
return query, nil
case v3.AggregateOperatorCount:
if mq.AggregateAttribute.Key != "" {
if mq.AggregateAttribute.IsColumn {
subQuery, err := tracesV3.ExistsSubQueryForFixedColumn(mq.AggregateAttribute, v3.FilterOperatorExists)
if err == nil {
filterSubQuery = fmt.Sprintf("%s AND %s", filterSubQuery, subQuery)
}
} else {
column := getColumnName(mq.AggregateAttribute)
filterSubQuery = fmt.Sprintf("%s AND has(%s, '%s')", filterSubQuery, column, mq.AggregateAttribute.Key)
}
}
op := "toFloat64(count())"
query := fmt.Sprintf(queryTmpl, op, filterSubQuery, groupBy, having, orderBy)
return query, nil
case v3.AggregateOperatorCountDistinct:
op := fmt.Sprintf("toFloat64(count(distinct(%s)))", aggregationKey)
query := fmt.Sprintf(queryTmpl, op, filterSubQuery, groupBy, having, orderBy)
return query, nil
default:
return "", fmt.Errorf("unsupported aggregate operator %s", mq.AggregateOperator)
}
}
// PrepareTracesQuery returns the query string for traces
// start and end are in epoch millisecond
// step is in seconds
func PrepareTracesQuery(start, end int64, panelType v3.PanelType, mq *v3.BuilderQuery, options v3.QBOptions) (string, error) {
// adjust the start and end time to the step interval
if panelType == v3.PanelTypeGraph {
// adjust the start and end time to the step interval for graph panel types
start = start - (start % (mq.StepInterval * 1000))
end = end - (end % (mq.StepInterval * 1000))
}
if options.GraphLimitQtype == constants.FirstQueryGraphLimit {
// give me just the group by names
query, err := buildTracesQuery(start, end, mq.StepInterval, mq, panelType, options)
if err != nil {
return "", err
}
query = tracesV3.AddLimitToQuery(query, mq.Limit)
return query, nil
} else if options.GraphLimitQtype == constants.SecondQueryGraphLimit {
query, err := buildTracesQuery(start, end, mq.StepInterval, mq, panelType, options)
if err != nil {
return "", err
}
return query, nil
}
query, err := buildTracesQuery(start, end, mq.StepInterval, mq, panelType, options)
if err != nil {
return "", err
}
if panelType == v3.PanelTypeValue {
query, err = tracesV3.ReduceToQuery(query, mq.ReduceTo, mq.AggregateOperator)
}
if panelType == v3.PanelTypeList || panelType == v3.PanelTypeTable {
query = tracesV3.AddLimitToQuery(query, mq.Limit)
if mq.Offset != 0 {
query = tracesV3.AddOffsetToQuery(query, mq.Offset)
}
}
return query, err
}
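
A hedged usage sketch; the timestamps mirror the test fixtures below, and the minimal BuilderQuery is illustrative rather than a canonical call site:

query, err := PrepareTracesQuery(
    1680066360726210000, 1680066458000000000,
    v3.PanelTypeTable,
    &v3.BuilderQuery{
        AggregateOperator: v3.AggregateOperatorCount,
        StepInterval:      60,
        Filters:           &v3.FilterSet{},
    },
    v3.QBOptions{},
)
// query is a "SELECT ... toFloat64(count()) as value from signoz_traces.distributed_signoz_index_v3 ..." string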

View File

@@ -0,0 +1,729 @@
package v4
import (
"testing"
"go.signoz.io/signoz/pkg/query-service/constants"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)
func Test_getClickHouseTracesColumnType(t *testing.T) {
type args struct {
columnType v3.AttributeKeyType
}
tests := []struct {
name string
args args
want string
}{
{
name: "tag",
args: args{
columnType: v3.AttributeKeyTypeTag,
},
want: "attributes",
},
{
name: "resource",
args: args{
columnType: v3.AttributeKeyTypeResource,
},
want: "resources",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := getClickHouseTracesColumnType(tt.args.columnType); got != tt.want {
t.Errorf("GetClickhouseTracesColumnType() = %v, want %v", got, tt.want)
}
})
}
}
func Test_getClickHouseTracesColumnDataType(t *testing.T) {
type args struct {
columnDataType v3.AttributeKeyDataType
}
tests := []struct {
name string
args args
want string
}{
{
name: "string",
args: args{
columnDataType: v3.AttributeKeyDataTypeString,
},
want: "string",
},
{
name: "float64",
args: args{
columnDataType: v3.AttributeKeyDataTypeFloat64,
},
want: "number",
},
{
name: "int64",
args: args{
columnDataType: v3.AttributeKeyDataTypeInt64,
},
want: "number",
},
{
name: "bool",
args: args{
columnDataType: v3.AttributeKeyDataTypeBool,
},
want: "bool",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := getClickHouseTracesColumnDataType(tt.args.columnDataType); got != tt.want {
t.Errorf("getClickhouseTracesColumnDataType() = %v, want %v", got, tt.want)
}
})
}
}
func Test_getColumnName(t *testing.T) {
type args struct {
key v3.AttributeKey
}
tests := []struct {
name string
args args
want string
}{
{
name: "tag",
args: args{
key: v3.AttributeKey{Key: "data", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
},
want: "attributes_string['data']",
},
{
name: "column",
args: args{
key: v3.AttributeKey{Key: "data", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true},
},
want: "`attribute_string_data`",
},
{
name: "static column",
args: args{
key: v3.AttributeKey{Key: "spanKind", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true},
},
want: "spanKind",
},
{
name: "missing meta",
args: args{
key: v3.AttributeKey{Key: "xyz"},
},
want: "attributes_string['xyz']",
},
{
name: "new composite column",
args: args{
key: v3.AttributeKey{Key: "response_status_code"},
},
want: "response_status_code",
},
{
name: "new composite column with metadata",
args: args{
key: v3.AttributeKey{Key: "response_status_code", DataType: v3.AttributeKeyDataTypeString, IsColumn: true},
},
want: "response_status_code",
},
{
name: "new normal column with metadata",
args: args{
key: v3.AttributeKey{Key: "http.route", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true},
},
want: "`attribute_string_http$$route`",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := getColumnName(tt.args.key); got != tt.want {
t.Errorf("getColumnName() = %v, want %v", got, tt.want)
}
})
}
}
func Test_getSelectLabels(t *testing.T) {
type args struct {
groupBy []v3.AttributeKey
}
tests := []struct {
name string
args args
want string
}{
{
name: "count",
args: args{
groupBy: []v3.AttributeKey{{Key: "user_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}},
},
want: " attributes_string['user_name'] as `user_name`",
},
{
name: "multiple group by",
args: args{
groupBy: []v3.AttributeKey{
{Key: "name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}, // static col
{Key: "service_name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource, IsColumn: true},
},
},
want: " name as `name`, `resource_string_service_name` as `service_name`",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := getSelectLabels(tt.args.groupBy); got != tt.want {
t.Errorf("getSelectLabels() = %v, want %v", got, tt.want)
}
})
}
}
func Test_buildTracesFilterQuery(t *testing.T) {
type args struct {
fs *v3.FilterSet
}
tests := []struct {
name string
args args
want string
wantErr bool
}{
{
name: "Test ignore resource",
args: args{
fs: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "service.name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: []interface{}{"service"}, Operator: v3.FilterOperatorIn},
},
}},
want: "",
},
{
name: "Test buildTracesFilterQuery in, nin",
args: args{
fs: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "method", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []interface{}{"GET", "POST"}, Operator: v3.FilterOperatorIn},
{Key: v3.AttributeKey{Key: "method", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []interface{}{"PUT"}, Operator: v3.FilterOperatorNotIn},
{Key: v3.AttributeKey{Key: "host", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: []interface{}{"server"}, Operator: v3.FilterOperatorNotIn},
{Key: v3.AttributeKey{Key: "status.code", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: []interface{}{200}, Operator: v3.FilterOperatorNotIn},
{Key: v3.AttributeKey{Key: "duration", DataType: v3.AttributeKeyDataTypeFloat64, Type: v3.AttributeKeyTypeTag}, Value: []interface{}{100.0}, Operator: v3.FilterOperatorIn},
{Key: v3.AttributeKey{Key: "isDone", DataType: v3.AttributeKeyDataTypeBool, Type: v3.AttributeKeyTypeTag}, Value: []interface{}{true}, Operator: v3.FilterOperatorIn},
}},
},
want: "attributes_string['method'] IN ['GET','POST'] AND attributes_string['method'] NOT IN ['PUT'] AND attributes_number['status.code'] NOT IN [200] AND attributes_number['duration'] IN [100] AND attributes_bool['isDone'] IN [true]",
wantErr: false,
},
{
name: "Test buildTracesFilterQuery not eq, neq, gt, lt, gte, lte",
args: args{
fs: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "duration", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 102, Operator: v3.FilterOperatorEqual},
{Key: v3.AttributeKey{Key: "duration", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 100, Operator: v3.FilterOperatorNotEqual},
{Key: v3.AttributeKey{Key: "duration", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: v3.FilterOperatorGreaterThan},
{Key: v3.AttributeKey{Key: "duration", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 200, Operator: v3.FilterOperatorLessThan},
{Key: v3.AttributeKey{Key: "duration", DataType: v3.AttributeKeyDataTypeFloat64, Type: v3.AttributeKeyTypeTag}, Value: 10.0, Operator: v3.FilterOperatorGreaterThanOrEq},
{Key: v3.AttributeKey{Key: "duration_str", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "200", Operator: v3.FilterOperatorLessThanOrEq},
}},
},
want: "attributes_number['duration'] = 102 AND attributes_number['duration'] != 100 AND attributes_number['duration'] > 10 AND attributes_number['duration'] < 200" +
" AND attributes_number['duration'] >= 10.000000 AND attributes_string['duration_str'] <= '200'",
wantErr: false,
},
{
name: "Test contains, ncontains, like, nlike, regex, nregex",
args: args{
fs: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "host", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "102.%", Operator: v3.FilterOperatorContains},
{Key: v3.AttributeKey{Key: "host", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "103_", Operator: v3.FilterOperatorNotContains},
{Key: v3.AttributeKey{Key: "host", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "102.", Operator: v3.FilterOperatorLike},
{Key: v3.AttributeKey{Key: "host", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "102", Operator: v3.FilterOperatorNotLike},
{Key: v3.AttributeKey{Key: "path", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}, Value: "/mypath", Operator: v3.FilterOperatorRegex},
{Key: v3.AttributeKey{Key: "path", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}, Value: "/health.*", Operator: v3.FilterOperatorNotRegex},
}},
},
want: "attributes_string['host'] ILIKE '%102.\\%%' AND attributes_string['host'] NOT ILIKE '%103\\_%' AND attributes_string['host'] ILIKE '102.' AND attributes_string['host'] NOT ILIKE '102' AND " +
"match(`attribute_string_path`, '/mypath') AND NOT match(`attribute_string_path`, '/health.*')",
},
{
name: "Test exists, nexists",
args: args{
fs: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "host", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: v3.FilterOperatorExists},
{Key: v3.AttributeKey{Key: "duration", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Operator: v3.FilterOperatorExists},
{Key: v3.AttributeKey{Key: "isDone", DataType: v3.AttributeKeyDataTypeBool, Type: v3.AttributeKeyTypeTag}, Operator: v3.FilterOperatorNotExists},
{Key: v3.AttributeKey{Key: "host1", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: v3.FilterOperatorNotExists},
{Key: v3.AttributeKey{Key: "path", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}, Operator: v3.FilterOperatorNotExists},
}},
},
want: "mapContains(attributes_string, 'host') AND mapContains(attributes_number, 'duration') AND NOT mapContains(attributes_bool, 'isDone') AND NOT mapContains(attributes_string, 'host1') AND path = ''",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := buildTracesFilterQuery(tt.args.fs)
if (err != nil) != tt.wantErr {
t.Errorf("buildTracesFilterQuery() error = %v, wantErr %v", err, tt.wantErr)
return
}
if got != tt.want {
t.Errorf("buildTracesFilterQuery() = %v, want %v", got, tt.want)
}
})
}
}
func Test_handleEmptyValuesInGroupBy(t *testing.T) {
type args struct {
groupBy []v3.AttributeKey
}
tests := []struct {
name string
args args
want string
wantErr bool
}{
{
name: "Test handleEmptyValuesInGroupBy",
args: args{
groupBy: []v3.AttributeKey{{Key: "bytes", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}},
},
want: "mapContains(attributes_string, 'bytes')",
wantErr: false,
},
{
name: "Test handleEmptyValuesInGroupBy",
args: args{
groupBy: []v3.AttributeKey{{Key: "bytes", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}},
},
want: "",
wantErr: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := handleEmptyValuesInGroupBy(tt.args.groupBy)
if (err != nil) != tt.wantErr {
t.Errorf("handleEmptyValuesInGroupBy() error = %v, wantErr %v", err, tt.wantErr)
return
}
if got != tt.want {
t.Errorf("handleEmptyValuesInGroupBy() = %v, want %v", got, tt.want)
}
})
}
}
func Test_orderByAttributeKeyTags(t *testing.T) {
type args struct {
panelType v3.PanelType
items []v3.OrderBy
tags []v3.AttributeKey
}
tests := []struct {
name string
args args
want string
}{
{
name: "test",
args: args{
panelType: v3.PanelTypeGraph,
items: []v3.OrderBy{{ColumnName: "name", Order: "ASC"}},
tags: []v3.AttributeKey{{Key: "name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}},
},
want: "`name` ASC",
},
{
name: "order by value",
args: args{
panelType: v3.PanelTypeGraph,
items: []v3.OrderBy{{ColumnName: "name", Order: "ASC"}, {ColumnName: constants.SigNozOrderByValue, Order: "DESC"}},
tags: []v3.AttributeKey{{Key: "name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}},
},
want: "`name` ASC,value DESC",
},
{
name: "test",
args: args{
panelType: v3.PanelTypeList,
items: []v3.OrderBy{{ColumnName: "status", Order: "DESC", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
{ColumnName: "route", Order: "DESC", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}},
},
want: "attributes_string['status'] DESC,`attribute_string_route` DESC",
},
{
name: "ignore order by in table panel",
args: args{
panelType: v3.PanelTypeTable,
items: []v3.OrderBy{{ColumnName: "timestamp", Order: "DESC"}},
tags: []v3.AttributeKey{},
},
want: "value DESC",
},
{
name: "add default order by ts for list panel",
args: args{
panelType: v3.PanelTypeList,
items: []v3.OrderBy{},
tags: []v3.AttributeKey{},
},
want: "timestamp DESC",
},
{
name: "add default order by value for graph panel",
args: args{
panelType: v3.PanelTypeGraph,
items: []v3.OrderBy{},
tags: []v3.AttributeKey{},
},
want: "value DESC",
},
{
name: "don't add default order by for table panel",
args: args{
panelType: v3.PanelTypeTable,
items: []v3.OrderBy{},
tags: []v3.AttributeKey{},
},
want: "value DESC",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := orderByAttributeKeyTags(tt.args.panelType, tt.args.items, tt.args.tags); got != tt.want {
t.Errorf("orderByAttributeKeyTags() = %v, want %v", got, tt.want)
}
})
}
}
func Test_buildTracesQuery(t *testing.T) {
type args struct {
start int64
end int64
step int64
mq *v3.BuilderQuery
panelType v3.PanelType
options v3.QBOptions
}
tests := []struct {
name string
args args
want string
wantErr bool
}{
{
name: "Test buildTracesQuery",
args: args{
panelType: v3.PanelTypeTable,
start: 1680066360726210000,
end: 1680066458000000000,
step: 1000,
mq: &v3.BuilderQuery{
AggregateOperator: v3.AggregateOperatorCount,
Filters: &v3.FilterSet{
Items: []v3.FilterItem{
{
Key: v3.AttributeKey{Key: "http.method", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
Value: 100,
Operator: v3.FilterOperatorEqual,
},
},
},
GroupBy: []v3.AttributeKey{{Key: "http.method", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}},
OrderBy: []v3.OrderBy{
{ColumnName: "http.method", Order: "ASC"}},
},
},
want: "SELECT attributes_string['http.method'] as `http.method`, toFloat64(count()) as value from signoz_traces.distributed_signoz_index_v3 where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') " +
"AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458) AND attributes_string['http.method'] = '100' AND mapContains(attributes_string, 'http.method') " +
"group by `http.method` order by `http.method` ASC",
},
{
name: "Test buildTracesQuery",
args: args{
panelType: v3.PanelTypeTable,
start: 1680066360726210000,
end: 1680066458000000000,
step: 1000,
mq: &v3.BuilderQuery{
AggregateOperator: v3.AggregateOperatorCount,
Filters: &v3.FilterSet{
Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "bytes", Type: v3.AttributeKeyTypeTag, DataType: v3.AttributeKeyDataTypeInt64}, Value: 100, Operator: ">"},
{Key: v3.AttributeKey{Key: "service.name", Type: v3.AttributeKeyTypeResource, DataType: v3.AttributeKeyDataTypeString}, Value: "myService", Operator: "="},
},
},
GroupBy: []v3.AttributeKey{{Key: "host", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeResource}},
OrderBy: []v3.OrderBy{
{ColumnName: "host", Order: "ASC"}},
},
},
want: "SELECT resources_number['host'] as `host`, toFloat64(count()) as value from signoz_traces.distributed_signoz_index_v3 where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') " +
"AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458) AND attributes_number['bytes'] > 100 AND " +
"(resource_fingerprint GLOBAL IN (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (seen_at_ts_bucket_start >= 1680064560) AND " +
"(seen_at_ts_bucket_start <= 1680066458) AND simpleJSONExtractString(labels, 'service.name') = 'myService' AND labels like '%service.name%myService%' AND " +
"( (simpleJSONHas(labels, 'host') AND labels like '%host%') ))) " +
"group by `host` order by `host` ASC",
},
{
name: "test noop list view",
args: args{
panelType: v3.PanelTypeList,
start: 1680066360726210000,
end: 1680066458000000000,
mq: &v3.BuilderQuery{
AggregateOperator: v3.AggregateOperatorNoOp,
Filters: &v3.FilterSet{},
SelectColumns: []v3.AttributeKey{{Key: "name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}},
OrderBy: []v3.OrderBy{{ColumnName: "timestamp", Order: "ASC"}},
},
},
want: "SELECT timestamp as timestamp_datetime, spanID, traceID, name as `name` from signoz_traces.distributed_signoz_index_v3 where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') " +
"AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458) order by timestamp ASC",
},
{
name: "test noop list view-without ts",
args: args{
panelType: v3.PanelTypeList,
start: 1680066360726210000,
end: 1680066458000000000,
mq: &v3.BuilderQuery{
AggregateOperator: v3.AggregateOperatorNoOp,
Filters: &v3.FilterSet{},
SelectColumns: []v3.AttributeKey{{Key: "name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}},
},
},
want: "SELECT timestamp as timestamp_datetime, spanID, traceID, name as `name` from signoz_traces.distributed_signoz_index_v3 where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') " +
"AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458) order by timestamp DESC",
},
{
name: "test noop trace view",
args: args{
panelType: v3.PanelTypeTrace,
start: 1680066360726210000,
end: 1680066458000000000,
mq: &v3.BuilderQuery{
AggregateOperator: v3.AggregateOperatorNoOp,
Filters: &v3.FilterSet{
Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "method", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "GET", Operator: "="},
{Key: v3.AttributeKey{Key: "service.name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "myService", Operator: "="},
},
},
},
},
want: "SELECT subQuery.serviceName, subQuery.name, count() AS span_count, subQuery.durationNano, subQuery.traceID AS traceID FROM signoz_traces.distributed_signoz_index_v3 INNER JOIN " +
"( SELECT * FROM (SELECT traceID, durationNano, serviceName, name FROM signoz_traces.signoz_index_v3 WHERE parentSpanID = '' AND (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') AND " +
"(ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458) AND attributes_string['method'] = 'GET' AND (resource_fingerprint GLOBAL IN (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource " +
"WHERE (seen_at_ts_bucket_start >= 1680064560) AND (seen_at_ts_bucket_start <= 1680066458) AND simpleJSONExtractString(labels, 'service.name') = 'myService' AND labels like '%service.name%myService%')) " +
"ORDER BY durationNano DESC LIMIT 1 BY traceID LIMIT 100) AS inner_subquery ) AS subQuery ON signoz_traces.distributed_signoz_index_v3.traceID = subQuery.traceID WHERE (timestamp >= '1680066360726210000' AND " +
"timestamp <= '1680066458000000000') AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458) GROUP BY subQuery.traceID, subQuery.durationNano, subQuery.name, subQuery.serviceName ORDER BY " +
"subQuery.durationNano desc LIMIT 1 BY subQuery.traceID;",
},
{
name: "Test order by value with having",
args: args{
panelType: v3.PanelTypeTable,
start: 1680066360726210000,
end: 1680066458000000000,
mq: &v3.BuilderQuery{
AggregateOperator: v3.AggregateOperatorCountDistinct,
Filters: &v3.FilterSet{},
AggregateAttribute: v3.AttributeKey{Key: "name", IsColumn: true, DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
OrderBy: []v3.OrderBy{{ColumnName: "#SIGNOZ_VALUE", Order: "ASC"}},
Having: []v3.Having{
{
ColumnName: "name",
Operator: ">",
Value: 10,
},
},
},
},
want: "SELECT toFloat64(count(distinct(name))) as value from signoz_traces.distributed_signoz_index_v3 where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') AND " +
"(ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458) having value > 10 order by value ASC",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := buildTracesQuery(tt.args.start, tt.args.end, tt.args.step, tt.args.mq, tt.args.panelType, tt.args.options)
if (err != nil) != tt.wantErr {
t.Errorf("buildTracesQuery() error = %v, wantErr %v", err, tt.wantErr)
return
}
if got != tt.want {
t.Errorf("buildTracesQuery() = %v, want %v", got, tt.want)
}
})
}
}
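
One constant pattern in the expected strings above is worth calling out: the timestamp predicate is compared in nanoseconds, while ts_bucket_start is bounded in epoch seconds, with the lower bound sitting 1800s before the window start. Presumably this widens the scan to buckets opened up to 30 minutes before the query window; that reading is inferred from the constants in these tests, not from the builder source. The arithmetic, for the window used throughout:

package main

import "fmt"

func main() {
	start := int64(1680066360726210000) // window start, ns
	end := int64(1680066458000000000)   // window end, ns
	fmt.Println(start/1_000_000_000 - 1800) // 1680064560: lower ts_bucket_start bound
	fmt.Println(end / 1_000_000_000)        // 1680066458: upper ts_bucket_start bound
}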
func TestPrepareTracesQuery(t *testing.T) {
type args struct {
start int64
end int64
panelType v3.PanelType
mq *v3.BuilderQuery
options v3.QBOptions
}
tests := []struct {
name string
args args
want string
wantErr bool
}{
{
name: "test with limit - first",
args: args{
start: 1680066360726210000,
end: 1680066458000000000,
panelType: v3.PanelTypeTable,
mq: &v3.BuilderQuery{
StepInterval: 60,
AggregateOperator: v3.AggregateOperatorCountDistinct,
Filters: &v3.FilterSet{},
AggregateAttribute: v3.AttributeKey{Key: "name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
GroupBy: []v3.AttributeKey{{Key: "function", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}},
Limit: 10,
OrderBy: []v3.OrderBy{{ColumnName: "#SIGNOZ_VALUE", Order: "DESC"}},
},
options: v3.QBOptions{
GraphLimitQtype: constants.FirstQueryGraphLimit,
},
},
want: "SELECT `function` from (SELECT attributes_string['function'] as `function`, toFloat64(count(distinct(name))) as value from signoz_traces.distributed_signoz_index_v3 " +
"where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458) AND mapContains(attributes_string, 'function') group by `function` order by value DESC) LIMIT 10",
},
{
name: "test with limit - second",
args: args{
start: 1680066360726210000,
end: 1680066458000000000,
panelType: v3.PanelTypeTable,
mq: &v3.BuilderQuery{
StepInterval: 60,
AggregateOperator: v3.AggregateOperatorCountDistinct,
Filters: &v3.FilterSet{},
AggregateAttribute: v3.AttributeKey{Key: "name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
GroupBy: []v3.AttributeKey{{Key: "function", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}},
OrderBy: []v3.OrderBy{{ColumnName: "#SIGNOZ_VALUE", Order: "DESC"}},
Limit: 10,
},
options: v3.QBOptions{
GraphLimitQtype: constants.SecondQueryGraphLimit,
},
},
want: "SELECT attributes_string['function'] as `function`, toFloat64(count(distinct(name))) as value from signoz_traces.distributed_signoz_index_v3 where " +
"(timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458) AND mapContains(attributes_string, 'function') AND (`function`) GLOBAL IN (%s) group by `function` order by value DESC",
},
{
name: "test with limit with resources- first",
args: args{
start: 1680066360726210000,
end: 1680066458000000000,
panelType: v3.PanelTypeTable,
mq: &v3.BuilderQuery{
StepInterval: 60,
AggregateOperator: v3.AggregateOperatorCountDistinct,
Filters: &v3.FilterSet{
Operator: "AND",
Items: []v3.FilterItem{
{
Key: v3.AttributeKey{Key: "line", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag},
Value: 100,
Operator: v3.FilterOperatorEqual,
},
{
Key: v3.AttributeKey{Key: "hostname", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource},
Value: "server1",
Operator: v3.FilterOperatorEqual,
},
},
},
AggregateAttribute: v3.AttributeKey{Key: "name", IsColumn: true, DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
GroupBy: []v3.AttributeKey{
{Key: "function", IsColumn: true, DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
{Key: "service.name", IsColumn: true, DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource},
},
Limit: 10,
OrderBy: []v3.OrderBy{{ColumnName: "#SIGNOZ_VALUE", Order: "DESC"}},
},
options: v3.QBOptions{
GraphLimitQtype: constants.FirstQueryGraphLimit,
},
},
want: "SELECT `function`,`service.name` from (SELECT `attribute_string_function` as `function`, `resource_string_service$$name` as `service.name`, toFloat64(count(distinct(name))) as value " +
"from signoz_traces.distributed_signoz_index_v3 where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458) " +
"AND attributes_number['line'] = 100 AND (resource_fingerprint GLOBAL IN (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE " +
"(seen_at_ts_bucket_start >= 1680064560) AND (seen_at_ts_bucket_start <= 1680066458) AND simpleJSONExtractString(labels, 'hostname') = 'server1' AND labels like '%hostname%server1%' AND " +
"( (simpleJSONHas(labels, 'service.name') AND labels like '%service.name%') ))) group by `function`,`service.name` order by value DESC) LIMIT 10",
},
{
name: "test with limit with resources - second",
args: args{
start: 1680066360726210000,
end: 1680066458000000000,
panelType: v3.PanelTypeTable,
mq: &v3.BuilderQuery{
StepInterval: 60,
AggregateOperator: v3.AggregateOperatorCountDistinct,
Filters: &v3.FilterSet{
Operator: "AND",
Items: []v3.FilterItem{
{
Key: v3.AttributeKey{Key: "line", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag},
Value: 100,
Operator: v3.FilterOperatorEqual,
},
{
Key: v3.AttributeKey{Key: "hostname", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource},
Value: "server1",
Operator: v3.FilterOperatorEqual,
},
},
},
AggregateAttribute: v3.AttributeKey{Key: "name", IsColumn: true, DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
GroupBy: []v3.AttributeKey{
{Key: "function", IsColumn: true, DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
{Key: "serviceName", IsColumn: true, DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
},
OrderBy: []v3.OrderBy{{ColumnName: "#SIGNOZ_VALUE", Order: "DESC"}},
Limit: 10,
},
options: v3.QBOptions{
GraphLimitQtype: constants.SecondQueryGraphLimit,
},
},
want: "SELECT `attribute_string_function` as `function`, serviceName as `serviceName`, toFloat64(count(distinct(name))) as value from signoz_traces.distributed_signoz_index_v3 " +
"where (timestamp >= '1680066360726210000' AND timestamp <= '1680066458000000000') AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458) AND attributes_number['line'] = 100 " +
"AND (resource_fingerprint GLOBAL IN (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (seen_at_ts_bucket_start >= 1680064560) AND (seen_at_ts_bucket_start <= 1680066458) " +
"AND simpleJSONExtractString(labels, 'hostname') = 'server1' AND labels like '%hostname%server1%')) AND (`function`,`serviceName`) GLOBAL IN (%s) group by `function`,`serviceName` order by value DESC",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := PrepareTracesQuery(tt.args.start, tt.args.end, tt.args.panelType, tt.args.mq, tt.args.options)
if (err != nil) != tt.wantErr {
t.Errorf("PrepareTracesQuery() error = %v, wantErr %v", err, tt.wantErr)
return
}
if got != tt.want {
t.Errorf("PrepareTracesQuery() = %v, want %v", got, tt.want)
}
})
}
}
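
The two pairs of limit cases differ only in which half of the two-phase flow they exercise: FirstQueryGraphLimit wraps the aggregation in an outer SELECT that keeps just the top-N group keys, while SecondQueryGraphLimit leaves a literal %s inside GLOBAL IN (%s) for the caller to fill with those keys once the first query has run. A minimal sketch of that substitution step (the tuple values below are made up for illustration; the surrounding query text is abbreviated):

package main

import "fmt"

func main() {
	// The second-phase query string keeps a literal %s placeholder.
	second := "SELECT ... AND (`function`) GLOBAL IN (%s) group by `function` order by value DESC"
	// After executing the first-phase query, its top-N keys are formatted
	// as tuples and injected (hypothetical values):
	final := fmt.Sprintf(second, "('doLogin'),('doPayment')")
	fmt.Println(final)
}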

View File

@@ -239,6 +239,8 @@ const (
SIGNOZ_TRACE_DBNAME = "signoz_traces"
SIGNOZ_SPAN_INDEX_TABLENAME = "distributed_signoz_index_v2"
SIGNOZ_SPAN_INDEX_LOCAL_TABLENAME = "signoz_index_v2"
+SIGNOZ_SPAN_INDEX_V3 = "distributed_signoz_index_v3"
+SIGNOZ_SPAN_INDEX_V3_LOCAL_TABLENAME = "signoz_index_v3"
SIGNOZ_TIMESERIES_v4_LOCAL_TABLENAME = "time_series_v4"
SIGNOZ_TIMESERIES_v4_6HRS_LOCAL_TABLENAME = "time_series_v4_6hrs"
SIGNOZ_TIMESERIES_v4_1DAY_LOCAL_TABLENAME = "time_series_v4_1day"
@@ -444,3 +446,203 @@ const MaxFilterSuggestionsExamplesLimit = 10
var SpanRenderLimitStr = GetOrDefaultEnv("SPAN_RENDER_LIMIT", "2500")
var MaxSpansInTraceStr = GetOrDefaultEnv("MAX_SPANS_IN_TRACE", "250000")
var StaticFieldsTraces = map[string]v3.AttributeKey{
"timestamp": {},
"traceID": {
Key: "traceID",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"spanID": {
Key: "spanID",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"parentSpanID": {
Key: "parentSpanID",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"name": {
Key: "name",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"serviceName": {
Key: "serviceName",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"kind": {
Key: "kind",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"spanKind": {
Key: "spanKind",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"durationNano": {
Key: "durationNano",
DataType: v3.AttributeKeyDataTypeFloat64,
IsColumn: true,
},
"statusCode": {
Key: "statusCode",
DataType: v3.AttributeKeyDataTypeFloat64,
IsColumn: true,
},
"hasError": {
Key: "hasError",
DataType: v3.AttributeKeyDataTypeBool,
IsColumn: true,
},
"statusMessage": {
Key: "statusMessage",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"statusCodeString": {
Key: "statusCodeString",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"externalHttpMethod": {
Key: "externalHttpMethod",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"externalHttpUrl": {
Key: "externalHttpUrl",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"dbSystem": {
Key: "dbSystem",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"dbName": {
Key: "dbName",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"dbOperation": {
Key: "dbOperation",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"peerService": {
Key: "peerService",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"httpMethod": {
Key: "httpMethod",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"httpUrl": {
Key: "httpUrl",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"httpRoute": {
Key: "httpRoute",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"httpHost": {
Key: "httpHost",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"msgSystem": {
Key: "msgSystem",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"msgOperation": {
Key: "msgOperation",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"rpcSystem": {
Key: "rpcSystem",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"rpcService": {
Key: "rpcService",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"rpcMethod": {
Key: "rpcMethod",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"responseStatusCode": {
Key: "responseStatusCode",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
// new support
"response_status_code": {
Key: "response_status_code",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"external_http_url": {
Key: "external_http_url",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"http_url": {
Key: "http_url",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"external_http_method": {
Key: "external_http_method",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"http_method": {
Key: "http_method",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"http_host": {
Key: "http_host",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"db_name": {
Key: "db_name",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"db_operation": {
Key: "db_operation",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"has_error": {
Key: "has_error",
DataType: v3.AttributeKeyDataTypeBool,
IsColumn: true,
},
"is_remote": {
Key: "is_remote",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
// the simple attributes are not present here as
// they are taken care of by the new format <attribute_type>_<attribute_datatype>_'<attribute_key>'
}
const TRACE_V4_MAX_PAGINATION_LIMIT = 10000
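
The closing comment describes the physical column scheme for materialized attributes: <attribute_type>_<attribute_datatype>_<attribute_key>, which matches the names seen in the query tests above (`attribute_string_route`, `resource_string_service$$name`), with dots in the key escaped as $$. A small sketch of that derivation, assuming the dot-to-$$ escaping is the only transformation involved (this is a reconstruction, not the repo's actual helper):

package main

import (
	"fmt"
	"strings"
)

// materializedColumn reconstructs the naming scheme from the comment above.
func materializedColumn(attrType, dataType, key string) string {
	return fmt.Sprintf("%s_%s_%s", attrType, dataType, strings.ReplaceAll(key, ".", "$$"))
}

func main() {
	fmt.Println(materializedColumn("attribute", "string", "http.route"))  // attribute_string_http$$route
	fmt.Println(materializedColumn("resource", "string", "service.name")) // resource_string_service$$name
}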

View File

@@ -183,7 +183,7 @@ func PrepareFilters(labels map[string]string, whereClauseItems []v3.FilterItem,
var attrFound bool
// as of now this logic will only apply for logs
-for _, tKey := range utils.GenerateLogEnrichmentKeys(v3.AttributeKey{Key: key}) {
+for _, tKey := range utils.GenerateEnrichmentKeys(v3.AttributeKey{Key: key}) {
if val, ok := keys[tKey]; ok {
attributeKey = val
attrFound = true

View File

@@ -29,15 +29,10 @@ type Reader interface {
// GetDisks returns a list of disks configured in the underlying DB. It is supported by
// clickhouse only.
GetDisks(ctx context.Context) (*[]model.DiskItem, *model.ApiError)
-GetSpanFilters(ctx context.Context, query *model.SpanFilterParams) (*model.SpanFiltersResponse, *model.ApiError)
GetTraceAggregateAttributes(ctx context.Context, req *v3.AggregateAttributeRequest) (*v3.AggregateAttributeResponse, error)
GetTraceAttributeKeys(ctx context.Context, req *v3.FilterAttributeKeyRequest) (*v3.FilterAttributeKeyResponse, error)
GetTraceAttributeValues(ctx context.Context, req *v3.FilterAttributeValueRequest) (*v3.FilterAttributeValueResponse, error)
GetSpanAttributeKeys(ctx context.Context) (map[string]v3.AttributeKey, error)
-GetTagFilters(ctx context.Context, query *model.TagFilterParams) (*model.TagFilters, *model.ApiError)
-GetTagValues(ctx context.Context, query *model.TagFilterParams) (*model.TagValues, *model.ApiError)
-GetFilteredSpans(ctx context.Context, query *model.GetFilteredSpansParams) (*model.GetFilterSpansResponse, *model.ApiError)
-GetFilteredSpansAggregates(ctx context.Context, query *model.GetFilteredSpanAggregatesParams) (*model.GetFilteredSpansAggregatesResponse, *model.ApiError)
ListErrors(ctx context.Context, params *model.ListErrorsParams) (*[]model.Error, *model.ApiError)
CountErrors(ctx context.Context, params *model.CountErrorsParams) (uint64, *model.ApiError)

View File

@@ -546,6 +546,9 @@ type SignozLogV2 struct {
SeverityText string `json:"severity_text" ch:"severity_text"`
SeverityNumber uint8 `json:"severity_number" ch:"severity_number"`
Body string `json:"body" ch:"body"`
+ScopeName string `json:"scope_name" ch:"scope_name"`
+ScopeVersion string `json:"scope_version" ch:"scope_version"`
+ScopeString map[string]string `json:"scope_string" ch:"scope_string"`
Resources_string map[string]string `json:"resources_string" ch:"resources_string"`
Attributes_string map[string]string `json:"attributes_string" ch:"attributes_string"`
Attributes_number map[string]float64 `json:"attributes_float" ch:"attributes_number"`

View File

@@ -463,9 +463,9 @@ func (r *BaseRule) ShouldAlert(series v3.Series) (Sample, bool) {
}
} else if r.compareOp() == ValueOutsideBounds {
for _, smpl := range series.Points {
-if math.Abs(smpl.Value) >= r.targetVal() {
+if math.Abs(smpl.Value) < r.targetVal() {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
-shouldAlert = true
+shouldAlert = false
break
}
}
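
Read together, the two flipped lines invert the ValueOutsideBounds check: instead of alerting as soon as one point lies outside the band, the rule now starts from an alerting state and is disqualified by the first point that falls back inside it (|value| < target). A minimal sketch of the corrected semantics, assuming shouldAlert is initialized to true before the loop (only the loop body appears in this hunk):

package main

import (
	"fmt"
	"math"
)

// outsideBounds mirrors the fixed loop: the series alerts only if no
// sampled point is inside the band.
func outsideBounds(points []float64, target float64) bool {
	shouldAlert := true
	for _, v := range points {
		if math.Abs(v) < target {
			shouldAlert = false // one in-bounds point disqualifies the series
			break
		}
	}
	return shouldAlert
}

func main() {
	fmt.Println(outsideBounds([]float64{12, -15, 20}, 10)) // true: every point outside
	fmt.Println(outsideBounds([]float64{12, 3, 20}, 10))   // false: 3 is inside the band
}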

View File

@@ -347,6 +347,7 @@ func createTelemetry() {
"alertsWithTSV2": alertsInfo.AlertsWithTSV2,
"logsBasedAlerts": alertsInfo.LogsBasedAlerts,
"metricBasedAlerts": alertsInfo.MetricBasedAlerts,
"anomalyBasedAlerts": alertsInfo.AnomalyBasedAlerts,
"tracesBasedAlerts": alertsInfo.TracesBasedAlerts,
"totalChannels": alertsInfo.TotalChannels,
"totalSavedViews": savedViewsInfo.TotalSavedViews,

View File

@@ -9,7 +9,7 @@ type LogsListTsRange struct {
End int64
}
-func GetLogsListTsRanges(start, end int64) []LogsListTsRange {
+func GetListTsRanges(start, end int64) []LogsListTsRange {
startNano := GetEpochNanoSecs(start)
endNano := GetEpochNanoSecs(end)
result := []LogsListTsRange{}
@@ -40,8 +40,8 @@ func GetLogsListTsRanges(start, end int64) []LogsListTsRange {
}
// This tries to see all possible fields that it can fall back to if some meta is missing
-// check Test_GenerateLogEnrichmentKeys for example
-func GenerateLogEnrichmentKeys(field v3.AttributeKey) []string {
+// check Test_GenerateEnrichmentKeys for example
+func GenerateEnrichmentKeys(field v3.AttributeKey) []string {
names := []string{}
if field.Type != v3.AttributeKeyTypeUnspecified && field.DataType != v3.AttributeKeyDataTypeUnspecified {
names = append(names, field.Key+"##"+field.Type.String()+"##"+field.DataType.String())

View File

@@ -7,7 +7,7 @@ import (
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)
-func TestLogsListTsRange(t *testing.T) {
+func TestListTsRange(t *testing.T) {
startEndData := []struct {
name string
start int64
@@ -44,7 +44,7 @@ func TestLogsListTsRange(t *testing.T) {
}
for _, test := range startEndData {
-res := GetLogsListTsRanges(test.start, test.end)
+res := GetListTsRanges(test.start, test.end)
for i, v := range res {
if test.res[i].Start != v.Start || test.res[i].End != v.End {
t.Errorf("expected range was %v - %v, got %v - %v", v.Start, v.End, test.res[i].Start, test.res[i].End)
@@ -53,7 +53,7 @@ func TestLogsListTsRange(t *testing.T) {
}
}
-func Test_GenerateLogEnrichmentKeys(t *testing.T) {
+func Test_GenerateEnrichmentKeys(t *testing.T) {
type args struct {
field v3.AttributeKey
}
@@ -96,8 +96,8 @@ func Test_GenerateLogEnrichmentKeys(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
-if got := GenerateLogEnrichmentKeys(tt.args.field); !reflect.DeepEqual(got, tt.want) {
-t.Errorf("generateLogEnrichmentKeys() = %v, want %v", got, tt.want)
+if got := GenerateEnrichmentKeys(tt.args.field); !reflect.DeepEqual(got, tt.want) {
+t.Errorf("GenerateEnrichmentKeys() = %v, want %v", got, tt.want)
}
})
}