Mirror of https://github.com/SigNoz/signoz.git (synced 2026-02-08 02:39:55 +00:00)

Compare commits: feat/timez...fixProduce (2 commits)

| Author | SHA1 | Date |
|---|---|---|
| | bf74ac7b5e | |
| | 55a4056aa5 | |
```diff
@@ -66,6 +66,28 @@ processors:
     # Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
     detectors: [env, system] # include ec2 for AWS, gcp for GCP and azure for Azure.
     timeout: 2s
+  signozspanmetrics/cumulative:
+    metrics_exporter: clickhousemetricswrite
+    latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s]
+    dimensions_cache_size: 100000
+    dimensions:
+      - name: service.namespace
+        default: default
+      - name: deployment.environment
+        default: default
+      # This is added to ensure the uniqueness of the timeseries
+      # Otherwise, identical timeseries produced by multiple replicas of
+      # collectors result in incorrect APM metrics
+      - name: signoz.collector.id
+      - name: service.version
+      - name: browser.platform
+      - name: browser.mobile
+      - name: k8s.cluster.name
+      - name: k8s.node.name
+      - name: k8s.namespace.name
+      - name: host.name
+      - name: host.type
+      - name: container.name
   # memory_limiter:
   #   # 80% of maximum memory up to 2G
   #   limit_mib: 1500

@@ -116,8 +138,6 @@ exporters:
     enabled: true
   clickhousemetricswrite/prometheus:
     endpoint: tcp://clickhouse:9000/signoz_metrics
-  clickhousemetricswritev2:
-    dsn: tcp://clickhouse:9000/signoz_metrics
   # logging: {}
   clickhouselogsexporter:
     dsn: tcp://clickhouse:9000/signoz_logs

@@ -141,20 +161,20 @@ service:
   pipelines:
     traces:
       receivers: [jaeger, otlp]
-      processors: [signozspanmetrics/delta, batch]
+      processors: [signozspanmetrics/cumulative, signozspanmetrics/delta, batch]
       exporters: [clickhousetraces]
     metrics:
       receivers: [otlp]
       processors: [batch]
-      exporters: [clickhousemetricswrite, clickhousemetricswritev2]
+      exporters: [clickhousemetricswrite]
-    metrics/hostmetrics:
+    metrics/generic:
       receivers: [hostmetrics]
       processors: [resourcedetection, batch]
-      exporters: [clickhousemetricswrite, clickhousemetricswritev2]
+      exporters: [clickhousemetricswrite]
     metrics/prometheus:
       receivers: [prometheus]
       processors: [batch]
-      exporters: [clickhousemetricswrite/prometheus, clickhousemetricswritev2]
+      exporters: [clickhousemetricswrite/prometheus]
     logs:
       receivers: [otlp, tcplog/docker]
       processors: [batch]
```
```diff
@@ -57,11 +57,35 @@ receivers:
       labels:
         job_name: otel-collector
 
+
 processors:
   batch:
     send_batch_size: 10000
     send_batch_max_size: 11000
     timeout: 10s
+  signozspanmetrics/cumulative:
+    metrics_exporter: clickhousemetricswrite
+    metrics_flush_interval: 60s
+    latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s]
+    dimensions_cache_size: 100000
+    dimensions:
+      - name: service.namespace
+        default: default
+      - name: deployment.environment
+        default: default
+      # This is added to ensure the uniqueness of the timeseries
+      # Otherwise, identical timeseries produced by multiple replicas of
+      # collectors result in incorrect APM metrics
+      - name: signoz.collector.id
+      - name: service.version
+      - name: browser.platform
+      - name: browser.mobile
+      - name: k8s.cluster.name
+      - name: k8s.node.name
+      - name: k8s.namespace.name
+      - name: host.name
+      - name: host.type
+      - name: container.name
   # memory_limiter:
   #   # 80% of maximum memory up to 2G
   #   limit_mib: 1500

@@ -125,8 +149,6 @@ exporters:
     enabled: true
   clickhousemetricswrite/prometheus:
     endpoint: tcp://clickhouse:9000/signoz_metrics
-  clickhousemetricswritev2:
-    dsn: tcp://clickhouse:9000/signoz_metrics
   clickhouselogsexporter:
     dsn: tcp://clickhouse:9000/signoz_logs
     timeout: 10s

@@ -146,20 +168,20 @@ service:
   pipelines:
     traces:
       receivers: [jaeger, otlp]
-      processors: [signozspanmetrics/delta, batch]
+      processors: [signozspanmetrics/cumulative, signozspanmetrics/delta, batch]
       exporters: [clickhousetraces]
     metrics:
       receivers: [otlp]
       processors: [batch]
-      exporters: [clickhousemetricswrite, clickhousemetricswritev2]
+      exporters: [clickhousemetricswrite]
-    metrics/hostmetrics:
+    metrics/generic:
       receivers: [hostmetrics]
       processors: [resourcedetection, batch]
-      exporters: [clickhousemetricswrite, clickhousemetricswritev2]
+      exporters: [clickhousemetricswrite]
     metrics/prometheus:
       receivers: [prometheus]
      processors: [batch]
-      exporters: [clickhousemetricswrite/prometheus, clickhousemetricswritev2]
+      exporters: [clickhousemetricswrite/prometheus]
     logs:
       receivers: [otlp, tcplog/docker]
       processors: [batch]
```
```diff
@@ -1,5 +1,5 @@
 # use a minimal alpine image
-FROM alpine:3.20.3
+FROM alpine:3.18.6
 
 # Add Maintainer Info
 LABEL maintainer="signoz"
```
```diff
@@ -8,7 +8,6 @@ import (
 	"time"
 
 	"github.com/jmoiron/sqlx"
-	"github.com/mattn/go-sqlite3"
 
 	"go.signoz.io/signoz/ee/query-service/license/sqlite"
 	"go.signoz.io/signoz/ee/query-service/model"

@@ -275,14 +274,14 @@ func (r *Repo) InitFeatures(req basemodel.FeatureSet) error {
 }
 
 // InsertLicenseV3 inserts a new license v3 in db
-func (r *Repo) InsertLicenseV3(ctx context.Context, l *model.LicenseV3) *model.ApiError {
+func (r *Repo) InsertLicenseV3(ctx context.Context, l *model.LicenseV3) error {
 
 	query := `INSERT INTO licenses_v3 (id, key, data) VALUES ($1, $2, $3)`
 
 	// licsense is the entity of zeus so putting the entire license here without defining schema
 	licenseData, err := json.Marshal(l.Data)
 	if err != nil {
-		return &model.ApiError{Typ: basemodel.ErrorBadData, Err: err}
+		return fmt.Errorf("insert license failed: license marshal error")
 	}
 
 	_, err = r.db.ExecContext(ctx,

@@ -293,14 +292,8 @@ func (r *Repo) InsertLicenseV3(ctx context.Context, l *model.LicenseV3) *model.A
 	)
 
 	if err != nil {
-		if sqliteErr, ok := err.(sqlite3.Error); ok {
-			if sqliteErr.ExtendedCode == sqlite3.ErrConstraintUnique {
-				zap.L().Error("error in inserting license data: ", zap.Error(sqliteErr))
-				return &model.ApiError{Typ: model.ErrorConflict, Err: sqliteErr}
-			}
-		}
 		zap.L().Error("error in inserting license data: ", zap.Error(err))
-		return &model.ApiError{Typ: basemodel.ErrorExec, Err: err}
+		return fmt.Errorf("failed to insert license in db: %v", err)
 	}
 
 	return nil
```
```diff
@@ -67,30 +67,6 @@ func StartManager(dbType string, db *sqlx.DB, useLicensesV3 bool, features ...ba
 		repo: &repo,
 	}
 
-	if useLicensesV3 {
-		// get active license from the db
-		active, err := m.repo.GetActiveLicense(context.Background())
-		if err != nil {
-			return m, err
-		}
-
-		// if we have an active license then need to fetch the complete details
-		if active != nil {
-			// fetch the new license structure from control plane
-			licenseV3, apiError := validate.ValidateLicenseV3(active.Key)
-			if apiError != nil {
-				return m, apiError
-			}
-
-			// insert the licenseV3 in sqlite db
-			apiError = m.repo.InsertLicenseV3(context.Background(), licenseV3)
-			// if the license already exists move ahead.
-			if apiError != nil && apiError.Typ != model.ErrorConflict {
-				return m, apiError
-			}
-		}
-	}
-
 	if err := m.start(useLicensesV3, features...); err != nil {
 		return m, err
 	}

@@ -487,7 +463,7 @@ func (lm *Manager) ActivateV3(ctx context.Context, licenseKey string) (licenseRe
 	err := lm.repo.InsertLicenseV3(ctx, license)
 	if err != nil {
 		zap.L().Error("failed to activate license", zap.Error(err))
-		return nil, err
+		return nil, model.InternalError(err)
 	}
 
 	// license is valid, activate it
```
```diff
@@ -61,11 +61,6 @@ func NewAnomalyRule(
 
 	zap.L().Info("creating new AnomalyRule", zap.String("id", id), zap.Any("opts", opts))
 
-	if p.RuleCondition.CompareOp == baserules.ValueIsBelow {
-		target := -1 * *p.RuleCondition.Target
-		p.RuleCondition.Target = &target
-	}
-
 	baseRule, err := baserules.NewBaseRule(id, p, reader, opts...)
 	if err != nil {
 		return nil, err
```
```diff
@@ -186,7 +186,6 @@
 		"@types/webpack-dev-server": "^4.7.2",
 		"@typescript-eslint/eslint-plugin": "^4.33.0",
 		"@typescript-eslint/parser": "^4.33.0",
-		"@vvo/tzdb": "6.149.0",
 		"autoprefixer": "10.4.19",
 		"babel-plugin-styled-components": "^1.12.0",
 		"compression-webpack-plugin": "9.0.0",
```
```diff
@@ -22,7 +22,6 @@ import AppActions from 'types/actions';
 import { UPDATE_USER_IS_FETCH } from 'types/actions/app';
 import { Organization } from 'types/api/user/getOrganization';
 import AppReducer from 'types/reducer/app';
-import { isCloudUser } from 'utils/app';
 import { routePermission } from 'utils/permission';
 
 import routes, {

@@ -77,8 +76,6 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
 
 	const { t } = useTranslation(['common']);
 
-	const isCloudUserVal = isCloudUser();
-
 	const localStorageUserAuthToken = getInitialUserTokenRefreshToken();
 
 	const dispatch = useDispatch<Dispatch<AppActions>>();

@@ -146,7 +143,6 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
 	const handleRedirectForOrgOnboarding = (key: string): void => {
 		if (
 			isLoggedInState &&
-			isCloudUserVal &&
 			!isFetchingOrgPreferences &&
 			!isLoadingOrgUsers &&
 			!isEmpty(orgUsers?.payload) &&

@@ -162,10 +158,6 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
 				history.push(ROUTES.ONBOARDING);
 			}
 		}
-
-		if (!isCloudUserVal && key === 'ONBOARDING') {
-			history.push(ROUTES.APPLICATION);
-		}
 	};
 
 	const handleUserLoginIfTokenPresent = async (

@@ -258,7 +250,7 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
 	const handleRouting = (): void => {
 		const showOrgOnboarding = shouldShowOnboarding();
 
-		if (showOrgOnboarding && !isOnboardingComplete && isCloudUserVal) {
+		if (showOrgOnboarding && !isOnboardingComplete) {
 			history.push(ROUTES.ONBOARDING);
 		} else {
 			history.push(ROUTES.APPLICATION);
```
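The last two hunks change who gets routed to onboarding: the new version drops `isCloudUserVal`, so the redirect keys only on org-onboarding state. A condensed TypeScript sketch of the gate before and after (names taken from the hunks; `showOrgOnboarding` and `isOnboardingComplete` are assumed to come from the surrounding component):

```ts
// Sketch only — not the full PrivateRoute component.
function nextRoute(
	showOrgOnboarding: boolean,
	isOnboardingComplete: boolean,
	isCloudUserVal: boolean, // consulted only by the old gate
): 'ONBOARDING' | 'APPLICATION' {
	// Old gate: showOrgOnboarding && !isOnboardingComplete && isCloudUserVal
	// New gate (below): the cloud-user check is gone, so non-cloud users with
	// incomplete org onboarding are also redirected.
	if (showOrgOnboarding && !isOnboardingComplete) {
		return 'ONBOARDING';
	}
	return 'APPLICATION';
}
```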
```diff
@@ -119,42 +119,3 @@
 		color: var(--bg-slate-400) !important;
 	}
 }
-
-.date-time-popover-footer {
-	border-top: 1px solid var(--bg-ink-200);
-	padding: 8px 14px;
-	.timezone-container {
-		&,
-		.timezone {
-			font-family: Inter;
-			font-size: 12px;
-			line-height: 16px;
-			letter-spacing: -0.06px;
-		}
-		display: flex;
-		align-items: center;
-		color: var(--bg-vanilla-400);
-		gap: 6px;
-		.timezone {
-			cursor: pointer;
-			padding: 0;
-			color: var(--bg-vanilla-100);
-			background-color: transparent;
-			border: none;
-		}
-	}
-}
-.timezone-badge {
-	display: flex;
-	align-items: center;
-	justify-content: center;
-	padding: 0 4px;
-	border-radius: 2px;
-	background: rgba(171, 189, 255, 0.04);
-	color: var(--bg-vanilla-100);
-	font-size: 12px;
-	font-weight: 400;
-	line-height: 16px;
-	letter-spacing: -0.06px;
-	cursor: pointer;
-}
```
```diff
@@ -15,14 +15,11 @@ import { isValidTimeFormat } from 'lib/getMinMax';
 import { defaultTo, isFunction, noop } from 'lodash-es';
 import debounce from 'lodash-es/debounce';
 import { CheckCircle, ChevronDown, Clock } from 'lucide-react';
-import { useTimezone } from 'providers/Timezone';
 import {
 	ChangeEvent,
 	Dispatch,
 	SetStateAction,
-	useCallback,
 	useEffect,
-	useMemo,
 	useState,
 } from 'react';
 import { useLocation } from 'react-router-dom';

@@ -31,8 +28,6 @@ import { popupContainer } from 'utils/selectPopupContainer';
 import CustomTimePickerPopoverContent from './CustomTimePickerPopoverContent';
 
 const maxAllowedMinTimeInMonths = 6;
-type ViewType = 'datetime' | 'timezone';
-const DEFAULT_VIEW: ViewType = 'datetime';
 
 interface CustomTimePickerProps {
 	onSelect: (value: string) => void;

@@ -86,25 +81,6 @@ function CustomTimePicker({
 	const location = useLocation();
 	const [isInputFocused, setIsInputFocused] = useState(false);
 
-	const [activeView, setActiveView] = useState<ViewType>(DEFAULT_VIEW);
-
-	const { timezone, browserTimezone } = useTimezone();
-	const activeTimezoneOffset = timezone?.offset;
-	const isTimezoneOverridden = useMemo(
-		() => timezone?.offset !== browserTimezone.offset,
-		[timezone, browserTimezone],
-	);
-
-	const handleViewChange = useCallback(
-		(newView: 'timezone' | 'datetime'): void => {
-			if (activeView !== newView) {
-				setActiveView(newView);
-			}
-			setOpen(!open);
-		},
-		[activeView, open, setOpen],
-	);
-
 	const getSelectedTimeRangeLabel = (
 		selectedTime: string,
 		selectedTimeValue: string,

@@ -156,7 +132,6 @@ function CustomTimePicker({
 		setOpen(newOpen);
 		if (!newOpen) {
 			setCustomDTPickerVisible?.(false);
-			setActiveView('datetime');
 		}
 	};
 

@@ -306,8 +281,6 @@ function CustomTimePicker({
 					handleGoLive={defaultTo(handleGoLive, noop)}
 					options={items}
 					selectedTime={selectedTime}
-					activeView={activeView}
-					setActiveView={setActiveView}
 				/>
 			) : (
 				content

@@ -344,23 +317,12 @@ function CustomTimePicker({
 					)
 				}
 				suffix={
-					<>
-						{!!isTimezoneOverridden && activeTimezoneOffset && (
-							<div
-								className="timezone-badge"
-								onClick={(e): void => {
-									e.stopPropagation();
-									handleViewChange('timezone');
-								}}
-							>
-								<span>{activeTimezoneOffset}</span>
-							</div>
-						)}
-						<ChevronDown
-							size={14}
-							onClick={(): void => handleViewChange('datetime')}
-						/>
-					</>
+					<ChevronDown
+						size={14}
+						onClick={(): void => {
+							setOpen(!open);
+						}}
+					/>
 				}
 			/>
 		</Popover>
```
```diff
@@ -1,6 +1,5 @@
 import './CustomTimePicker.styles.scss';
 
-import { Color } from '@signozhq/design-tokens';
 import { Button } from 'antd';
 import cx from 'classnames';
 import ROUTES from 'constants/routes';

@@ -10,13 +9,10 @@ import {
 	Option,
 	RelativeDurationSuggestionOptions,
 } from 'container/TopNav/DateTimeSelectionV2/config';
-import { Clock } from 'lucide-react';
-import { useTimezone } from 'providers/Timezone';
 import { Dispatch, SetStateAction, useMemo } from 'react';
 import { useLocation } from 'react-router-dom';
 
 import RangePickerModal from './RangePickerModal';
-import TimezonePicker from './TimezonePicker';
 
 interface CustomTimePickerPopoverContentProps {
 	options: any[];

@@ -30,11 +26,8 @@ interface CustomTimePickerPopoverContentProps {
 	onSelectHandler: (label: string, value: string) => void;
 	handleGoLive: () => void;
 	selectedTime: string;
-	activeView: 'datetime' | 'timezone';
-	setActiveView: Dispatch<SetStateAction<'datetime' | 'timezone'>>;
 }
 
-// eslint-disable-next-line sonarjs/cognitive-complexity
 function CustomTimePickerPopoverContent({
 	options,
 	setIsOpen,

@@ -44,16 +37,12 @@ function CustomTimePickerPopoverContent({
 	onSelectHandler,
 	handleGoLive,
 	selectedTime,
-	activeView,
-	setActiveView,
 }: CustomTimePickerPopoverContentProps): JSX.Element {
 	const { pathname } = useLocation();
 
 	const isLogsExplorerPage = useMemo(() => pathname === ROUTES.LOGS_EXPLORER, [
 		pathname,
 	]);
-	const { timezone } = useTimezone();
-	const activeTimezoneOffset = timezone?.offset;
 
 	function getTimeChips(options: Option[]): JSX.Element {
 		return (

@@ -74,74 +63,54 @@ function CustomTimePickerPopoverContent({
 		);
 	}
 
-	return activeView === 'datetime' ? (
-		<div>
-			<div className="date-time-popover">
-				<div className="date-time-options">
-					{isLogsExplorerPage && (
-						<Button className="data-time-live" type="text" onClick={handleGoLive}>
-							Live
-						</Button>
-					)}
-					{options.map((option) => (
-						<Button
-							type="text"
-							key={option.label + option.value}
-							onClick={(): void => {
-								onSelectHandler(option.label, option.value);
-							}}
-							className={cx(
-								'date-time-options-btn',
-								customDateTimeVisible
-									? option.value === 'custom' && 'active'
-									: selectedTime === option.value && 'active',
-							)}
-						>
-							{option.label}
-						</Button>
-					))}
-				</div>
-				<div
-					className={cx(
-						'relative-date-time',
-						selectedTime === 'custom' || customDateTimeVisible
-							? 'date-picker'
-							: 'relative-times',
-					)}
-				>
-					{selectedTime === 'custom' || customDateTimeVisible ? (
-						<RangePickerModal
-							setCustomDTPickerVisible={setCustomDTPickerVisible}
-							setIsOpen={setIsOpen}
-							onCustomDateHandler={onCustomDateHandler}
-							selectedTime={selectedTime}
-						/>
-					) : (
-						<div className="relative-times-container">
-							<div className="time-heading">RELATIVE TIMES</div>
-							<div>{getTimeChips(RelativeDurationSuggestionOptions)}</div>
-						</div>
-					)}
-				</div>
-			</div>
-
-			<div className="date-time-popover-footer">
-				<div className="timezone-container">
-					<Clock color={Color.BG_VANILLA_400} height={12} width={12} />
-					<span className="timezone-text">You are at</span>
-					<button
-						type="button"
-						className="timezone"
-						onClick={(): void => setActiveView('timezone')}
-					>
-						{activeTimezoneOffset}
-					</button>
-				</div>
-			</div>
-		</div>
-	) : (
+	return (
 		<div className="date-time-popover">
-			<TimezonePicker setActiveView={setActiveView} setIsOpen={setIsOpen} />
+			<div className="date-time-options">
+				{isLogsExplorerPage && (
+					<Button className="data-time-live" type="text" onClick={handleGoLive}>
+						Live
+					</Button>
+				)}
+				{options.map((option) => (
+					<Button
+						type="text"
+						key={option.label + option.value}
+						onClick={(): void => {
+							onSelectHandler(option.label, option.value);
+						}}
+						className={cx(
+							'date-time-options-btn',
+							customDateTimeVisible
+								? option.value === 'custom' && 'active'
+								: selectedTime === option.value && 'active',
+						)}
+					>
+						{option.label}
+					</Button>
+				))}
+			</div>
+			<div
+				className={cx(
+					'relative-date-time',
+					selectedTime === 'custom' || customDateTimeVisible
+						? 'date-picker'
+						: 'relative-times',
+				)}
+			>
+				{selectedTime === 'custom' || customDateTimeVisible ? (
+					<RangePickerModal
+						setCustomDTPickerVisible={setCustomDTPickerVisible}
+						setIsOpen={setIsOpen}
+						onCustomDateHandler={onCustomDateHandler}
+						selectedTime={selectedTime}
+					/>
+				) : (
+					<div className="relative-times-container">
+						<div className="time-heading">RELATIVE TIMES</div>
+						<div>{getTimeChips(RelativeDurationSuggestionOptions)}</div>
+					</div>
+				)}
+			</div>
 		</div>
 	);
 }
```
```diff
@@ -4,7 +4,6 @@ import { DatePicker } from 'antd';
 import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal';
 import { LexicalContext } from 'container/TopNav/DateTimeSelectionV2/config';
 import dayjs, { Dayjs } from 'dayjs';
-import { useTimezone } from 'providers/Timezone';
 import { Dispatch, SetStateAction } from 'react';
 import { useSelector } from 'react-redux';
 import { AppState } from 'store/reducers';

@@ -50,8 +49,6 @@ function RangePickerModal(props: RangePickerModalProps): JSX.Element {
 		}
 		onCustomDateHandler(date_time, LexicalContext.CUSTOM_DATE_PICKER);
 	};
-
-	const { timezone } = useTimezone();
 	return (
 		<div className="custom-date-picker">
 			<RangePicker

@@ -61,10 +58,7 @@ function RangePickerModal(props: RangePickerModalProps): JSX.Element {
 				onOk={onModalOkHandler}
 				// eslint-disable-next-line react/jsx-props-no-spreading
 				{...(selectedTime === 'custom' && {
-					defaultValue: [
-						dayjs(minTime / 1000000).tz(timezone.value),
-						dayjs(maxTime / 1000000).tz(timezone.value),
-					],
+					defaultValue: [dayjs(minTime / 1000000), dayjs(maxTime / 1000000)],
 				})}
 			/>
 		</div>
```
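The `defaultValue` change drops the `.tz(...)` calls, so the picker's preset range renders in the browser's local zone rather than a user-selected one. A small sketch of the difference, assuming dayjs with the `utc` and `timezone` plugins (both appear elsewhere in this compare); the zone string is only an example:

```ts
import dayjs from 'dayjs';
import timezonePlugin from 'dayjs/plugin/timezone';
import utc from 'dayjs/plugin/utc';

dayjs.extend(utc);
dayjs.extend(timezonePlugin);

// minTime/maxTime are nanosecond timestamps in the component, hence / 1000000.
const minTimeNs = 1700000000000000000;
const local = dayjs(minTimeNs / 1000000); // wall-clock in the browser zone
const pinned = dayjs(minTimeNs / 1000000).tz('Asia/Kolkata'); // wall-clock in an explicit zone
console.log(local.format(), pinned.format()); // same instant, different rendering
```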
```diff
@@ -1,125 +0,0 @@
-// Variables
-$font-family: 'Inter';
-$border-color: var(--bg-slate-400);
-$item-spacing: 8px;
-
-// Mixins
-@mixin text-style-base {
-	font-family: $font-family;
-	font-style: normal;
-	font-weight: 400;
-}
-
-@mixin flex-center {
-	display: flex;
-	align-items: center;
-}
-
-.timezone-picker {
-	width: 532px;
-	color: var(--bg-vanilla-400);
-	font-family: $font-family;
-
-	&__search {
-		@include flex-center;
-		justify-content: space-between;
-		padding: 12px 14px;
-		border-bottom: 1px solid $border-color;
-	}
-
-	&__input-container {
-		@include flex-center;
-		gap: 6px;
-		width: -webkit-fill-available;
-	}
-
-	&__input {
-		@include text-style-base;
-		width: 100%;
-		background: transparent;
-		border: none;
-		outline: none;
-		color: var(--bg-vanilla-100);
-		font-size: 14px;
-		line-height: 20px;
-		letter-spacing: -0.07px;
-		padding: 0;
-
-		&::placeholder {
-			color: var(--bg-vanilla-400);
-		}
-	}
-
-	&__esc-key {
-		@include text-style-base;
-		font-size: 8px;
-		color: var(--bg-vanilla-400);
-		letter-spacing: -0.04px;
-		border-radius: 2.286px;
-		border: 1.143px solid var(--bg-ink-200);
-		border-bottom-width: 2.286px;
-		background: var(--bg-ink-400);
-		padding: 0 1px;
-	}
-
-	&__list {
-		max-height: 310px;
-		overflow-y: auto;
-	}
-
-	&__item {
-		@include flex-center;
-		justify-content: space-between;
-		padding: 7.5px 6px 7.5px $item-spacing;
-		margin: 4px $item-spacing;
-		cursor: pointer;
-		background: transparent;
-		border: none;
-		width: -webkit-fill-available;
-		color: var(--bg-vanilla-400);
-		font-family: $font-family;
-
-		&:hover,
-		&.selected {
-			border-radius: 2px;
-			background: rgba(171, 189, 255, 0.04);
-			color: var(--bg-vanilla-100);
-		}
-
-		&.has-divider {
-			position: relative;
-			&::after {
-				content: '';
-				position: absolute;
-				bottom: -2px;
-				left: -$item-spacing;
-				right: -$item-spacing;
-				border-bottom: 1px solid $border-color;
-			}
-		}
-	}
-
-	&__name {
-		@include text-style-base;
-		font-size: 14px;
-		line-height: 20px;
-		letter-spacing: -0.07px;
-	}
-
-	&__offset {
-		color: var(--bg-vanilla-100);
-		font-size: 12px;
-		line-height: 16px;
-		letter-spacing: -0.06px;
-	}
-}
-
-.timezone-name-wrapper {
-	@include flex-center;
-	gap: 6px;
-
-	&__selected-icon {
-		height: 15px;
-		width: 15px;
-	}
-}
```
```diff
@@ -1,156 +0,0 @@
-import './TimezonePicker.styles.scss';
-
-import { Color } from '@signozhq/design-tokens';
-import cx from 'classnames';
-import { TimezonePickerShortcuts } from 'constants/shortcuts/TimezonePickerShortcuts';
-import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys';
-import { Check, Search } from 'lucide-react';
-import { useTimezone } from 'providers/Timezone';
-import {
-	Dispatch,
-	SetStateAction,
-	useCallback,
-	useEffect,
-	useState,
-} from 'react';
-
-import { Timezone, TIMEZONE_DATA } from './timezoneUtils';
-
-interface SearchBarProps {
-	value: string;
-	onChange: (value: string) => void;
-}
-
-interface TimezoneItemProps {
-	timezone: Timezone;
-	isSelected?: boolean;
-	onClick?: () => void;
-}
-
-const ICON_SIZE = 14;
-
-function SearchBar({ value, onChange }: SearchBarProps): JSX.Element {
-	return (
-		<div className="timezone-picker__search">
-			<div className="timezone-picker__input-container">
-				<Search color={Color.BG_VANILLA_400} height={ICON_SIZE} width={ICON_SIZE} />
-				<input
-					type="text"
-					className="timezone-picker__input"
-					placeholder="Search timezones..."
-					value={value}
-					onChange={(e): void => onChange(e.target.value)}
-				/>
-			</div>
-			<kbd className="timezone-picker__esc-key">esc</kbd>
-		</div>
-	);
-}
-
-function TimezoneItem({
-	timezone,
-	isSelected = false,
-	onClick,
-}: TimezoneItemProps): JSX.Element {
-	return (
-		<button
-			type="button"
-			className={cx('timezone-picker__item', {
-				selected: isSelected,
-				'has-divider': timezone.hasDivider,
-			})}
-			onClick={onClick}
-		>
-			<div className="timezone-name-wrapper">
-				<div className="timezone-name-wrapper__selected-icon">
-					{isSelected && (
-						<Check
-							color={Color.BG_VANILLA_100}
-							height={ICON_SIZE}
-							width={ICON_SIZE}
-						/>
-					)}
-				</div>
-				<div className="timezone-picker__name">{timezone.name}</div>
-			</div>
-			<div className="timezone-picker__offset">{timezone.offset}</div>
-		</button>
-	);
-}
-
-TimezoneItem.defaultProps = {
-	isSelected: false,
-	onClick: undefined,
-};
-
-interface TimezonePickerProps {
-	setActiveView: Dispatch<SetStateAction<'datetime' | 'timezone'>>;
-	setIsOpen: Dispatch<SetStateAction<boolean>>;
-}
-
-function TimezonePicker({
-	setActiveView,
-	setIsOpen,
-}: TimezonePickerProps): JSX.Element {
-	const [searchTerm, setSearchTerm] = useState('');
-	const { timezone, updateTimezone } = useTimezone();
-	const [selectedTimezone, setSelectedTimezone] = useState<string>(
-		timezone?.name ?? TIMEZONE_DATA[0].name,
-	);
-
-	const getFilteredTimezones = useCallback((searchTerm: string): Timezone[] => {
-		const normalizedSearch = searchTerm.toLowerCase();
-		return TIMEZONE_DATA.filter(
-			(tz) =>
-				tz.name.toLowerCase().includes(normalizedSearch) ||
-				tz.offset.toLowerCase().includes(normalizedSearch) ||
-				tz.searchIndex.toLowerCase().includes(normalizedSearch),
-		);
-	}, []);
-
-	const handleCloseTimezonePicker = useCallback(() => {
-		setActiveView('datetime');
-	}, [setActiveView]);
-
-	const handleTimezoneSelect = useCallback(
-		(timezone: Timezone) => {
-			setSelectedTimezone(timezone.name);
-			updateTimezone(timezone);
-			handleCloseTimezonePicker();
-			setIsOpen(false);
-		},
-		[handleCloseTimezonePicker, setIsOpen, updateTimezone],
-	);
-
-	// Register keyboard shortcuts
-	const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys();
-
-	useEffect(() => {
-		registerShortcut(
-			TimezonePickerShortcuts.CloseTimezonePicker,
-			handleCloseTimezonePicker,
-		);
-
-		return (): void => {
-			deregisterShortcut(TimezonePickerShortcuts.CloseTimezonePicker);
-		};
-	}, [deregisterShortcut, handleCloseTimezonePicker, registerShortcut]);
-
-	return (
-		<div className="timezone-picker">
-			<SearchBar value={searchTerm} onChange={setSearchTerm} />
-			<div className="timezone-picker__list">
-				{getFilteredTimezones(searchTerm).map((timezone) => (
-					<TimezoneItem
-						key={timezone.value}
-						timezone={timezone}
-						isSelected={timezone.name === selectedTimezone}
-						onClick={(): void => handleTimezoneSelect(timezone)}
-					/>
-				))}
-			</div>
-		</div>
-	);
-}
-
-export default TimezonePicker;
```
```diff
@@ -1,142 +0,0 @@
-import { getTimeZones } from '@vvo/tzdb';
-import dayjs from 'dayjs';
-import timezone from 'dayjs/plugin/timezone';
-import utc from 'dayjs/plugin/utc';
-
-dayjs.extend(utc);
-dayjs.extend(timezone);
-
-export interface Timezone {
-	name: string;
-	value: string;
-	offset: string;
-	searchIndex: string;
-	hasDivider?: boolean;
-}
-
-// Constants
-const TIMEZONE_TYPES = {
-	BROWSER: 'BROWSER',
-	UTC: 'UTC',
-	STANDARD: 'STANDARD',
-} as const;
-
-type TimezoneType = typeof TIMEZONE_TYPES[keyof typeof TIMEZONE_TYPES];
-
-const UTC_TIMEZONE: Timezone = {
-	name: 'Coordinated Universal Time — UTC, GMT',
-	value: 'UTC',
-	offset: 'UTC',
-	searchIndex: 'UTC',
-	hasDivider: true,
-};
-
-// Helper functions
-const isValidTimezone = (tzName: string): boolean => {
-	try {
-		dayjs.tz(dayjs(), tzName);
-		return true;
-	} catch {
-		return false;
-	}
-};
-
-const formatOffset = (offsetMinutes: number): string => {
-	if (offsetMinutes === 0) return 'UTC';
-
-	const hours = Math.floor(Math.abs(offsetMinutes) / 60);
-	const minutes = Math.abs(offsetMinutes) % 60;
-	const sign = offsetMinutes > 0 ? '+' : '-';
-
-	return `UTC ${sign} ${hours}${
-		minutes ? `:${minutes.toString().padStart(2, '0')}` : ':00'
-	}`;
-};
-
-const createTimezoneEntry = (
-	name: string,
-	offsetMinutes: number,
-	type: TimezoneType = TIMEZONE_TYPES.STANDARD,
-	hasDivider = false,
-): Timezone => {
-	const offset = formatOffset(offsetMinutes);
-	let value = name;
-	let displayName = name;
-
-	switch (type) {
-		case TIMEZONE_TYPES.BROWSER:
-			displayName = `Browser time — ${name}`;
-			value = name;
-			break;
-		case TIMEZONE_TYPES.UTC:
-			displayName = 'Coordinated Universal Time — UTC, GMT';
-			value = 'UTC';
-			break;
-		case TIMEZONE_TYPES.STANDARD:
-			displayName = name;
-			value = name;
-			break;
-		default:
-			console.error(`Invalid timezone type: ${type}`);
-	}
-
-	return {
-		name: displayName,
-		value,
-		offset,
-		searchIndex: offset.replace(/ /g, ''),
-		...(hasDivider && { hasDivider }),
-	};
-};
-
-const getOffsetByTimezone = (timezone: string): number => {
-	const dayjsTimezone = dayjs().tz(timezone);
-	return dayjsTimezone.utcOffset();
-};
-
-export const getBrowserTimezone = (): Timezone => {
-	const browserTz = dayjs.tz.guess();
-	const browserOffset = getOffsetByTimezone(browserTz);
-	return createTimezoneEntry(browserTz, browserOffset, TIMEZONE_TYPES.BROWSER);
-};
-
-const filterAndSortTimezones = (
-	allTimezones: ReturnType<typeof getTimeZones>,
-	browserTzName?: string,
-): Timezone[] =>
-	allTimezones
-		.filter(
-			(tz) =>
-				!tz.name.startsWith('Etc/') &&
-				isValidTimezone(tz.name) &&
-				tz.name !== browserTzName,
-		)
-		.sort((a, b) => a.name.localeCompare(b.name))
-		.map((tz) => createTimezoneEntry(tz.name, tz.rawOffsetInMinutes));
-
-const generateTimezoneData = (): Timezone[] => {
-	const allTimezones = getTimeZones();
-	const timezones: Timezone[] = [];
-
-	// Add browser timezone
-	const browserTzObject = getBrowserTimezone();
-	timezones.push(browserTzObject);
-
-	// Add UTC timezone with divider
-	timezones.push(UTC_TIMEZONE);
-
-	// Add remaining timezones
-	timezones.push(...filterAndSortTimezones(allTimezones, browserTzObject.value));
-
-	return timezones;
-};
-
-export const getTimezoneObjectByTimezoneString = (
-	timezone: string,
-): Timezone => {
-	const utcOffset = getOffsetByTimezone(timezone);
-
-	return createTimezoneEntry(timezone, utcOffset);
-};
-
-export const TIMEZONE_DATA = generateTimezoneData();
```
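For context on what the deleted helper produced, here is `formatOffset` restated verbatim from the hunk above so it runs standalone, with sample outputs:

```ts
const formatOffset = (offsetMinutes: number): string => {
	if (offsetMinutes === 0) return 'UTC';

	const hours = Math.floor(Math.abs(offsetMinutes) / 60);
	const minutes = Math.abs(offsetMinutes) % 60;
	const sign = offsetMinutes > 0 ? '+' : '-';

	return `UTC ${sign} ${hours}${
		minutes ? `:${minutes.toString().padStart(2, '0')}` : ':00'
	}`;
};

console.log(formatOffset(330)); // "UTC + 5:30"
console.log(formatOffset(-480)); // "UTC - 8:00"
console.log(formatOffset(0)); // "UTC"
```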
```diff
@@ -1,5 +1,4 @@
 import {
-	_adapters,
 	BarController,
 	BarElement,
 	CategoryScale,

@@ -19,10 +18,8 @@ import {
 } from 'chart.js';
 import annotationPlugin from 'chartjs-plugin-annotation';
 import { generateGridTitle } from 'container/GridPanelSwitch/utils';
-import dayjs from 'dayjs';
 import { useIsDarkMode } from 'hooks/useDarkMode';
 import isEqual from 'lodash-es/isEqual';
-import { useTimezone } from 'providers/Timezone';
 import {
 	forwardRef,
 	memo,

@@ -65,17 +62,6 @@ Chart.register(
 
 Tooltip.positioners.custom = TooltipPositionHandler;
 
-// Map of Chart.js time formats to dayjs format strings
-const formatMap = {
-	'HH:mm:ss': 'HH:mm:ss',
-	'HH:mm': 'HH:mm',
-	'MM/DD HH:mm': 'MM/DD HH:mm',
-	'MM/dd HH:mm': 'MM/DD HH:mm',
-	'MM/DD': 'MM/DD',
-	'YY-MM': 'YY-MM',
-	YY: 'YY',
-};
-
 const Graph = forwardRef<ToggleGraphProps | undefined, GraphProps>(
 	(
 		{

@@ -94,13 +80,11 @@ const Graph = forwardRef<ToggleGraphProps | undefined, GraphProps>(
 			dragSelectColor,
 		},
 		ref,
-		// eslint-disable-next-line sonarjs/cognitive-complexity
 	): JSX.Element => {
 		const nearestDatasetIndex = useRef<null | number>(null);
 		const chartRef = useRef<HTMLCanvasElement>(null);
 		const isDarkMode = useIsDarkMode();
 		const gridTitle = useMemo(() => generateGridTitle(title), [title]);
-		const { timezone } = useTimezone();
 
 		const currentTheme = isDarkMode ? 'dark' : 'light';
 		const xAxisTimeUnit = useXAxisTimeUnit(data); // Computes the relevant time unit for x axis by analyzing the time stamp data

@@ -128,22 +112,6 @@ const Graph = forwardRef<ToggleGraphProps | undefined, GraphProps>(
 			return 'rgba(231,233,237,0.8)';
 		}, [currentTheme]);
 
-		// Override Chart.js date adapter to use dayjs with timezone support
-		useEffect(() => {
-			_adapters._date.override({
-				format(time: number | Date, fmt: string) {
-					const dayjsTime = dayjs(time).tz(timezone?.value);
-					const format = formatMap[fmt as keyof typeof formatMap];
-					if (!format) {
-						console.warn(`Missing datetime format for ${fmt}`);
-						return dayjsTime.format('YYYY-MM-DD HH:mm:ss'); // fallback format
-					}
-
-					return dayjsTime.format(format);
-				},
-			});
-		}, [timezone]);
-
 		const buildChart = useCallback(() => {
 			if (lineChartRef.current !== undefined) {
 				lineChartRef.current.destroy();

@@ -164,7 +132,6 @@ const Graph = forwardRef<ToggleGraphProps | undefined, GraphProps>(
 				isStacked,
 				onClickHandler,
 				data,
-				timezone,
 			);
 
 			const chartHasData = hasData(data);

@@ -199,7 +166,6 @@ const Graph = forwardRef<ToggleGraphProps | undefined, GraphProps>(
 			isStacked,
 			onClickHandler,
 			data,
-			timezone,
 			name,
 			type,
 		]);
```
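Assembled from the removed lines, the deleted `useEffect` wired dayjs (with the timezone plugin) into Chart.js's date adapter so axis ticks followed the selected zone. A self-contained sketch of that pattern; `tzValue` is a placeholder for the value the `useTimezone()` provider supplied:

```ts
import { _adapters } from 'chart.js';
import dayjs from 'dayjs';
import timezonePlugin from 'dayjs/plugin/timezone';
import utc from 'dayjs/plugin/utc';

dayjs.extend(utc);
dayjs.extend(timezonePlugin);

// Map of Chart.js time formats to dayjs format strings (copied from the hunk).
const formatMap: Record<string, string> = {
	'HH:mm:ss': 'HH:mm:ss',
	'HH:mm': 'HH:mm',
	'MM/DD HH:mm': 'MM/DD HH:mm',
	'MM/dd HH:mm': 'MM/DD HH:mm',
	'MM/DD': 'MM/DD',
	'YY-MM': 'YY-MM',
	YY: 'YY',
};

const tzValue = 'UTC'; // placeholder for the provider-supplied zone

_adapters._date.override({
	format(time: number | Date, fmt: string): string {
		const dayjsTime = dayjs(time).tz(tzValue);
		const format = formatMap[fmt];
		if (!format) {
			console.warn(`Missing datetime format for ${fmt}`);
			return dayjsTime.format('YYYY-MM-DD HH:mm:ss'); // fallback format
		}
		return dayjsTime.format(format);
	},
});
```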
```diff
@@ -1,6 +1,5 @@
 import { Chart, ChartConfiguration, ChartData, Color } from 'chart.js';
 import * as chartjsAdapter from 'chartjs-adapter-date-fns';
-import { Timezone } from 'components/CustomTimePicker/timezoneUtils';
 import dayjs from 'dayjs';
 import { MutableRefObject } from 'react';
 

@@ -51,7 +50,6 @@ export const getGraphOptions = (
 	isStacked: boolean | undefined,
 	onClickHandler: GraphOnClickHandler | undefined,
 	data: ChartData,
-	timezone: Timezone,
 	// eslint-disable-next-line sonarjs/cognitive-complexity
 ): CustomChartOptions => ({
 	animation: {

@@ -99,7 +97,7 @@ export const getGraphOptions = (
 			callbacks: {
 				title(context): string | string[] {
 					const date = dayjs(context[0].parsed.x);
-					return date.tz(timezone?.value).format('MMM DD, YYYY, HH:mm:ss');
+					return date.format('MMM DD, YYYY, HH:mm:ss');
 				},
 				label(context): string | string[] {
 					let label = context.dataset.label || '';
```
```diff
@@ -8,13 +8,13 @@ import LogDetail from 'components/LogDetail';
 import { VIEW_TYPES } from 'components/LogDetail/constants';
 import { unescapeString } from 'container/LogDetailedView/utils';
 import { FontSize } from 'container/OptionsMenu/types';
+import dayjs from 'dayjs';
 import dompurify from 'dompurify';
 import { useActiveLog } from 'hooks/logs/useActiveLog';
 import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
 import { useIsDarkMode } from 'hooks/useDarkMode';
 // utils
 import { FlatLogData } from 'lib/logs/flatLogData';
-import { useTimezone } from 'providers/Timezone';
 import { useCallback, useMemo, useState } from 'react';
 // interfaces
 import { IField } from 'types/api/logs/fields';

@@ -174,20 +174,12 @@ function ListLogView({
 		[selectedFields],
 	);
 
-	const { formatTimezoneAdjustedTimestamp } = useTimezone();
-
 	const timestampValue = useMemo(
 		() =>
 			typeof flattenLogData.timestamp === 'string'
-				? formatTimezoneAdjustedTimestamp(
-						flattenLogData.timestamp,
-						'YYYY-MM-DD HH:mm:ss.SSS',
-				  )
-				: formatTimezoneAdjustedTimestamp(
-						flattenLogData.timestamp / 1e6,
-						'YYYY-MM-DD HH:mm:ss.SSS',
-				  ),
-		[flattenLogData.timestamp, formatTimezoneAdjustedTimestamp],
+				? dayjs(flattenLogData.timestamp).format('YYYY-MM-DD HH:mm:ss.SSS')
+				: dayjs(flattenLogData.timestamp / 1e6).format('YYYY-MM-DD HH:mm:ss.SSS'),
+		[flattenLogData.timestamp],
 	);
 
 	const logType = getLogIndicatorType(logData);
```
```diff
@@ -6,6 +6,7 @@ import LogDetail from 'components/LogDetail';
 import { VIEW_TYPES, VIEWS } from 'components/LogDetail/constants';
 import { unescapeString } from 'container/LogDetailedView/utils';
 import LogsExplorerContext from 'container/LogsExplorerContext';
+import dayjs from 'dayjs';
 import dompurify from 'dompurify';
 import { useActiveLog } from 'hooks/logs/useActiveLog';
 import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';

@@ -13,7 +14,6 @@ import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
 import { useIsDarkMode } from 'hooks/useDarkMode';
 import { FlatLogData } from 'lib/logs/flatLogData';
 import { isEmpty, isNumber, isUndefined } from 'lodash-es';
-import { useTimezone } from 'providers/Timezone';
 import {
 	KeyboardEvent,
 	MouseEvent,

@@ -89,24 +89,16 @@ function RawLogView({
 		attributesText += ' | ';
 	}
 
-	const { formatTimezoneAdjustedTimestamp } = useTimezone();
-
 	const text = useMemo(() => {
 		const date =
 			typeof data.timestamp === 'string'
-				? formatTimezoneAdjustedTimestamp(data.timestamp, 'YYYY-MM-DD HH:mm:ss.SSS')
-				: formatTimezoneAdjustedTimestamp(
-						data.timestamp / 1e6,
-						'YYYY-MM-DD HH:mm:ss.SSS',
-				  );
+				? dayjs(data.timestamp)
+				: dayjs(data.timestamp / 1e6);
 
-		return `${date} | ${attributesText} ${data.body}`;
-	}, [
-		data.timestamp,
-		data.body,
-		attributesText,
-		formatTimezoneAdjustedTimestamp,
-	]);
+		return `${date.format('YYYY-MM-DD HH:mm:ss.SSS')} | ${attributesText} ${
+			data.body
+		}`;
+	}, [data.timestamp, data.body, attributesText]);
 
 	const handleClickExpand = useCallback(() => {
 		if (activeContextLog || isReadOnly) return;
```
@@ -5,10 +5,10 @@ import { Typography } from 'antd';
|
|||||||
import { ColumnsType } from 'antd/es/table';
|
import { ColumnsType } from 'antd/es/table';
|
||||||
import cx from 'classnames';
|
import cx from 'classnames';
|
||||||
import { unescapeString } from 'container/LogDetailedView/utils';
|
import { unescapeString } from 'container/LogDetailedView/utils';
|
||||||
|
import dayjs from 'dayjs';
|
||||||
import dompurify from 'dompurify';
|
import dompurify from 'dompurify';
|
||||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||||
import { FlatLogData } from 'lib/logs/flatLogData';
|
import { FlatLogData } from 'lib/logs/flatLogData';
|
||||||
import { useTimezone } from 'providers/Timezone';
|
|
||||||
import { useMemo } from 'react';
|
import { useMemo } from 'react';
|
||||||
import { FORBID_DOM_PURIFY_TAGS } from 'utils/app';
|
import { FORBID_DOM_PURIFY_TAGS } from 'utils/app';
|
||||||
|
|
||||||
@@ -44,8 +44,6 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
|
|||||||
logs,
|
logs,
|
||||||
]);
|
]);
|
||||||
|
|
||||||
const { formatTimezoneAdjustedTimestamp } = useTimezone();
|
|
||||||
|
|
||||||
const columns: ColumnsType<Record<string, unknown>> = useMemo(() => {
|
const columns: ColumnsType<Record<string, unknown>> = useMemo(() => {
|
||||||
const fieldColumns: ColumnsType<Record<string, unknown>> = fields
|
const fieldColumns: ColumnsType<Record<string, unknown>> = fields
|
||||||
.filter((e) => e.name !== 'id')
|
.filter((e) => e.name !== 'id')
|
||||||
@@ -83,11 +81,8 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
|
|||||||
render: (field, item): ColumnTypeRender<Record<string, unknown>> => {
|
render: (field, item): ColumnTypeRender<Record<string, unknown>> => {
|
||||||
const date =
|
const date =
|
||||||
typeof field === 'string'
|
typeof field === 'string'
|
||||||
? formatTimezoneAdjustedTimestamp(field, 'YYYY-MM-DD HH:mm:ss.SSS')
|
? dayjs(field).format('YYYY-MM-DD HH:mm:ss.SSS')
|
||||||
: formatTimezoneAdjustedTimestamp(
|
: dayjs(field / 1e6).format('YYYY-MM-DD HH:mm:ss.SSS');
|
||||||
field / 1e6,
|
|
||||||
'YYYY-MM-DD HH:mm:ss.SSS',
|
|
||||||
);
|
|
||||||
return {
|
return {
|
||||||
children: (
|
children: (
|
||||||
<div className="table-timestamp">
|
<div className="table-timestamp">
|
||||||
@@ -130,15 +125,7 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
|
|||||||
},
|
},
|
||||||
...(appendTo === 'end' ? fieldColumns : []),
|
...(appendTo === 'end' ? fieldColumns : []),
|
||||||
];
|
];
|
||||||
}, [
|
}, [fields, isListViewPanel, appendTo, isDarkMode, linesPerRow, fontSize]);
|
||||||
fields,
|
|
||||||
isListViewPanel,
|
|
||||||
appendTo,
|
|
||||||
isDarkMode,
|
|
||||||
linesPerRow,
|
|
||||||
fontSize,
|
|
||||||
formatTimezoneAdjustedTimestamp,
|
|
||||||
]);
|
|
||||||
|
|
||||||
return { columns, dataSource: flattenLogData };
|
return { columns, dataSource: flattenLogData };
|
||||||
};
|
};
|
||||||
@@ -1,13 +1,11 @@
 import { Typography } from 'antd';
-import { useTimezone } from 'providers/Timezone';
+import convertDateToAmAndPm from 'lib/convertDateToAmAndPm';
+import getFormattedDate from 'lib/getFormatedDate';

 function Time({ CreatedOrUpdateTime }: DateProps): JSX.Element {
-	const { formatTimezoneAdjustedTimestamp } = useTimezone();
 	const time = new Date(CreatedOrUpdateTime);
-	const timeString = formatTimezoneAdjustedTimestamp(
-		time,
-		'MM/DD/YYYY hh:mm:ss A (UTC Z)',
-	);
+	const date = getFormattedDate(time);
+	const timeString = `${date} ${convertDateToAmAndPm(time)}`;
 	return <Typography>{timeString}</Typography>;
 }

@@ -21,5 +21,4 @@ export enum LOCALSTORAGE {
 	THEME_ANALYTICS_V1 = 'THEME_ANALYTICS_V1',
 	LAST_USED_SAVED_VIEWS = 'LAST_USED_SAVED_VIEWS',
 	SHOW_LOGS_QUICK_FILTERS = 'SHOW_LOGS_QUICK_FILTERS',
-	PREFERRED_TIMEZONE = 'PREFERRED_TIMEZONE',
 }
@@ -1,3 +0,0 @@
-export const TimezonePickerShortcuts = {
-	CloseTimezonePicker: 'escape',
-};
@@ -7,7 +7,6 @@ import useUrlQuery from 'hooks/useUrlQuery';
 import history from 'lib/history';
 import heatmapPlugin from 'lib/uPlotLib/plugins/heatmapPlugin';
 import timelinePlugin from 'lib/uPlotLib/plugins/timelinePlugin';
-import { useTimezone } from 'providers/Timezone';
 import { useMemo, useRef } from 'react';
 import { useDispatch } from 'react-redux';
 import { UpdateTimeInterval } from 'store/actions';
@@ -49,7 +48,6 @@ function HorizontalTimelineGraph({

 	const urlQuery = useUrlQuery();
 	const dispatch = useDispatch();
-	const { timezone } = useTimezone();

 	const options: uPlot.Options = useMemo(
 		() => ({
@@ -118,18 +116,8 @@ function HorizontalTimelineGraph({
 						}),
 				  ]
 				: [],
-
-			tzDate: (timestamp: number): Date =>
-				uPlot.tzDate(new Date(timestamp * 1e3), timezone?.value),
 		}),
-		[
-			width,
-			isDarkMode,
-			transformedData.length,
-			urlQuery,
-			dispatch,
-			timezone?.value,
-		],
+		[width, isDarkMode, transformedData.length, urlQuery, dispatch],
 	);
 	return <Uplot data={transformedData} options={options} />;
 }
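The `tzDate` option dropped above is uPlot's hook for rendering the x-axis in an explicit IANA timezone; without it, uPlot falls back to the browser's local zone. A minimal sketch of the option as uPlot documents it (the width/height/series values and the sample zone are illustrative assumptions, not values from this diff):

import uPlot from 'uplot';

const options: uPlot.Options = {
	width: 600,
	height: 300,
	series: [{}, { stroke: 'red' }],
	// uPlot x-values are Unix seconds; tzDate maps each one to a Date whose
	// wall-clock fields are shifted into the target IANA timezone.
	tzDate: (ts: number): Date => uPlot.tzDate(new Date(ts * 1e3), 'Asia/Kolkata'),
};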
@@ -6,7 +6,6 @@ import {
 	useGetAlertRuleDetailsTimelineTable,
 	useTimelineTable,
 } from 'pages/AlertDetails/hooks';
-import { useTimezone } from 'providers/Timezone';
 import { useMemo, useState } from 'react';
 import { useTranslation } from 'react-i18next';
 import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
@@ -40,8 +39,6 @@ function TimelineTable(): JSX.Element {

 	const { t } = useTranslation('common');

-	const { formatTimezoneAdjustedTimestamp } = useTimezone();
-
 	if (isError || !isValidRuleId || !ruleId) {
 		return <div>{t('something_went_wrong')}</div>;
 	}
@@ -54,7 +51,6 @@ function TimelineTable(): JSX.Element {
 				filters,
 				labels: labels ?? {},
 				setFilters,
-				formatTimezoneAdjustedTimestamp,
 			})}
 			dataSource={timelineData}
 			pagination={paginationConfig}
@@ -8,7 +8,6 @@ import ClientSideQBSearch, {
 import { ConditionalAlertPopover } from 'container/AlertHistory/AlertPopover/AlertPopover';
 import { transformKeyValuesToAttributeValuesMap } from 'container/QueryBuilder/filters/utils';
 import { useIsDarkMode } from 'hooks/useDarkMode';
-import { TimestampInput } from 'hooks/useTimezoneFormatter/useTimezoneFormatter';
 import { Search } from 'lucide-react';
 import AlertLabels, {
 	AlertLabelsProps,
@@ -17,6 +16,7 @@ import AlertState from 'pages/AlertDetails/AlertHeader/AlertState/AlertState';
 import { useMemo } from 'react';
 import { AlertRuleTimelineTableResponse } from 'types/api/alerts/def';
 import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
+import { formatEpochTimestamp } from 'utils/timeUtils';

 const transformLabelsToQbKeys = (
 	labels: AlertRuleTimelineTableResponse['labels'],
@@ -74,15 +74,10 @@ export const timelineTableColumns = ({
 	filters,
 	labels,
 	setFilters,
-	formatTimezoneAdjustedTimestamp,
 }: {
 	filters: TagFilter;
 	labels: AlertLabelsProps['labels'];
 	setFilters: (filters: TagFilter) => void;
-	formatTimezoneAdjustedTimestamp: (
-		input: TimestampInput,
-		format?: string,
-	) => string;
 }): ColumnsType<AlertRuleTimelineTableResponse> => [
 	{
 		title: 'STATE',
@@ -111,9 +106,7 @@ export const timelineTableColumns = ({
 		dataIndex: 'unixMilli',
 		width: 200,
 		render: (value): JSX.Element => (
-			<div className="alert-rule__created-at">
-				{formatTimezoneAdjustedTimestamp(value, 'MMM D, YYYY ⎯ HH:mm:ss')}
-			</div>
+			<div className="alert-rule__created-at">{formatEpochTimestamp(value)}</div>
 		),
 	},
 	{
@@ -17,15 +17,14 @@ import getAll from 'api/errors/getAll';
 import getErrorCounts from 'api/errors/getErrorCounts';
 import { ResizeTable } from 'components/ResizeTable';
 import ROUTES from 'constants/routes';
+import dayjs from 'dayjs';
 import { useNotifications } from 'hooks/useNotifications';
 import useResourceAttribute from 'hooks/useResourceAttribute';
 import { convertRawQueriesToTraceSelectedTags } from 'hooks/useResourceAttribute/utils';
-import { TimestampInput } from 'hooks/useTimezoneFormatter/useTimezoneFormatter';
 import useUrlQuery from 'hooks/useUrlQuery';
 import createQueryParams from 'lib/createQueryParams';
 import history from 'lib/history';
 import { isUndefined } from 'lodash-es';
-import { useTimezone } from 'providers/Timezone';
 import { useCallback, useEffect, useMemo, useRef } from 'react';
 import { useTranslation } from 'react-i18next';
 import { useQueries } from 'react-query';
@@ -156,16 +155,8 @@ function AllErrors(): JSX.Element {
 		}
 	}, [data?.error, data?.payload, t, notifications]);

-	const getDateValue = (
-		value: string,
-		formatTimezoneAdjustedTimestamp: (
-			input: TimestampInput,
-			format?: string,
-		) => string,
-	): JSX.Element => (
-		<Typography>
-			{formatTimezoneAdjustedTimestamp(value, 'DD/MM/YYYY hh:mm:ss A')}
-		</Typography>
+	const getDateValue = (value: string): JSX.Element => (
+		<Typography>{dayjs(value).format('DD/MM/YYYY HH:mm:ss A')}</Typography>
 	);

 	const filterIcon = useCallback(() => <SearchOutlined />, []);
@@ -292,8 +283,6 @@ function AllErrors(): JSX.Element {
 		[filterIcon, filterDropdownWrapper],
 	);

-	const { formatTimezoneAdjustedTimestamp } = useTimezone();
-
 	const columns: ColumnsType<Exception> = [
 		{
 			title: 'Exception Type',
@@ -353,8 +342,7 @@ function AllErrors(): JSX.Element {
 			dataIndex: 'lastSeen',
 			width: 80,
 			key: 'lastSeen',
-			render: (value): JSX.Element =>
-				getDateValue(value, formatTimezoneAdjustedTimestamp),
+			render: getDateValue,
 			sorter: true,
 			defaultSortOrder: getDefaultOrder(
 				getUpdatedParams,
@@ -367,8 +355,7 @@ function AllErrors(): JSX.Element {
 			dataIndex: 'firstSeen',
 			width: 80,
 			key: 'firstSeen',
-			render: (value): JSX.Element =>
-				getDateValue(value, formatTimezoneAdjustedTimestamp),
+			render: getDateValue,
 			sorter: true,
 			defaultSortOrder: getDefaultOrder(
 				getUpdatedParams,
@@ -10,7 +10,6 @@ import getAxes from 'lib/uPlotLib/utils/getAxes';
 import { getUplotChartDataForAnomalyDetection } from 'lib/uPlotLib/utils/getUplotChartData';
 import { getYAxisScaleForAnomalyDetection } from 'lib/uPlotLib/utils/getYAxisScale';
 import { LineChart } from 'lucide-react';
-import { useTimezone } from 'providers/Timezone';
 import { useEffect, useRef, useState } from 'react';
 import uPlot from 'uplot';

@@ -149,12 +148,10 @@ function AnomalyAlertEvaluationView({
 		  ]
 		: [];

-	const { timezone } = useTimezone();
-
 	const options = {
 		width: dimensions.width,
 		height: dimensions.height - 36,
-		plugins: [bandsPlugin, tooltipPlugin(isDarkMode, timezone?.value)],
+		plugins: [bandsPlugin, tooltipPlugin(isDarkMode)],
 		focus: {
 			alpha: 0.3,
 		},
@@ -259,8 +256,6 @@ function AnomalyAlertEvaluationView({
 			show: true,
 		},
 		axes: getAxes(isDarkMode, yAxisUnit),
-		tzDate: (timestamp: number): Date =>
-			uPlot.tzDate(new Date(timestamp * 1e3), timezone?.value),
 	};

 	const handleSearch = (searchText: string): void => {
@@ -1,10 +1,8 @@
 import { themeColors } from 'constants/theme';
-import dayjs from 'dayjs';
 import { generateColor } from 'lib/uPlotLib/utils/generateColor';

 const tooltipPlugin = (
 	isDarkMode: boolean,
-	timezone: string,
 ): { hooks: { init: (u: any) => void } } => {
 	let tooltip: HTMLDivElement;
 	const tooltipLeftOffset = 10;
@@ -19,7 +17,7 @@ const tooltipPlugin = (
 			return value.toFixed(3);
 		}
 		if (value instanceof Date) {
-			return dayjs(value).tz(timezone).format('MM/DD/YYYY, h:mm:ss A');
+			return value.toLocaleString();
 		}
 		if (value == null) {
 			return 'N/A';
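The removed `dayjs(value).tz(timezone)` call only works once dayjs's utc and timezone plugins are registered; `.tz()` throws otherwise. A sketch of the required setup (the sample zone string is an assumption for illustration):

import dayjs from 'dayjs';
import timezone from 'dayjs/plugin/timezone';
import utc from 'dayjs/plugin/utc';

// Both plugins must be extended before .tz() becomes available.
dayjs.extend(utc);
dayjs.extend(timezone);

console.log(dayjs(new Date()).tz('America/New_York').format('MM/DD/YYYY, h:mm:ss A'));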
@@ -6,12 +6,12 @@ import getNextPrevId from 'api/errors/getNextPrevId';
 import Editor from 'components/Editor';
 import { ResizeTable } from 'components/ResizeTable';
 import { getNanoSeconds } from 'container/AllError/utils';
+import dayjs from 'dayjs';
 import { useNotifications } from 'hooks/useNotifications';
 import createQueryParams from 'lib/createQueryParams';
 import history from 'lib/history';
 import { isUndefined } from 'lodash-es';
 import { urlKey } from 'pages/ErrorDetails/utils';
-import { useTimezone } from 'providers/Timezone';
 import { useEffect, useMemo, useRef, useState } from 'react';
 import { useTranslation } from 'react-i18next';
 import { useQuery } from 'react-query';
@@ -103,6 +103,8 @@ function ErrorDetails(props: ErrorDetailsProps): JSX.Element {
 		}
 	};

+	const timeStamp = dayjs(errorDetail.timestamp);
+
 	const data: { key: string; value: string }[] = Object.keys(errorDetail)
 		.filter((e) => !keyToExclude.includes(e))
 		.map((key) => ({
@@ -134,8 +136,6 @@ function ErrorDetails(props: ErrorDetailsProps): JSX.Element {
 		// eslint-disable-next-line react-hooks/exhaustive-deps
 	}, [data]);

-	const { formatTimezoneAdjustedTimestamp } = useTimezone();
-
 	return (
 		<>
 			<Typography>{errorDetail.exceptionType}</Typography>
@@ -145,12 +145,7 @@ function ErrorDetails(props: ErrorDetailsProps): JSX.Element {
 			<EventContainer>
 				<div>
 					<Typography>Event {errorDetail.errorId}</Typography>
-					<Typography>
-						{formatTimezoneAdjustedTimestamp(
-							errorDetail.timestamp,
-							'DD/MM/YYYY hh:mm:ss A (UTC Z)',
-						)}
-					</Typography>
+					<Typography>{timeStamp.format('MMM DD YYYY hh:mm:ss A')}</Typography>
 				</div>
 				<div>
 					<Space align="end" direction="horizontal">
@@ -8,7 +8,7 @@ import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
 import ROUTES from 'constants/routes';
 import useComponentPermission from 'hooks/useComponentPermission';
 import useFetch from 'hooks/useFetch';
-import { useCallback, useEffect, useRef, useState } from 'react';
+import { useCallback, useEffect, useState } from 'react';
 import { useTranslation } from 'react-i18next';
 import { useSelector } from 'react-redux';
 import { AppState } from 'store/reducers';
@@ -83,22 +83,16 @@ function BasicInfo({
 		window.open(ROUTES.CHANNELS_NEW, '_blank');
 		// eslint-disable-next-line react-hooks/exhaustive-deps
 	}, []);
-	const hasLoggedEvent = useRef(false);

 	useEffect(() => {
-		if (!channels.loading && isNewRule && !hasLoggedEvent.current) {
+		if (!channels.loading && isNewRule) {
 			logEvent('Alert: New alert creation page visited', {
 				dataSource: ALERTS_DATA_SOURCE_MAP[alertDef?.alertType as AlertTypes],
 				numberOfChannels: channels?.payload?.length,
 			});
-			hasLoggedEvent.current = true;
 		}
 		// eslint-disable-next-line react-hooks/exhaustive-deps
-	}, [channels.loading]);
+	}, [channels.payload, channels.loading]);

-	const refetchChannels = async (): Promise<void> => {
-		await channels.refetch();
-	};
-
 	return (
 		<>
@@ -203,7 +197,7 @@ function BasicInfo({
 			{!shouldBroadCastToAllChannels && (
 				<Tooltip
 					title={
-						noChannels && !addNewChannelPermission
+						noChannels
 							? 'No channels. Ask an admin to create a notification channel'
 							: undefined
 					}
@@ -218,10 +212,10 @@ function BasicInfo({
 				]}
 			>
 				<ChannelSelect
-					onDropdownOpen={refetchChannels}
-					disabled={shouldBroadCastToAllChannels}
+					disabled={
+						shouldBroadCastToAllChannels || noChannels || !!channels.loading
+					}
 					currentValue={alertDef.preferredChannels}
-					handleCreateNewChannels={handleCreateNewChannels}
 					channels={channels}
 					onSelectChannels={(preferredChannels): void => {
 						setAlertDef({
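The `hasLoggedEvent` ref on the `-` side is the usual guard for fire-at-most-once analytics inside an effect that can re-run: a ref persists across renders without triggering them. A standalone sketch of the pattern (the hook name and `logEvent` signature are illustrative, not from this repo):

import { useEffect, useRef } from 'react';

function useLogOnce(loading: boolean, logEvent: (name: string) => void): void {
	const hasLogged = useRef(false);

	useEffect(() => {
		// The effect may re-run every time `loading` flips, but the ref
		// ensures the event fires at most once per mount.
		if (!loading && !hasLogged.current) {
			logEvent('page visited');
			hasLogged.current = true;
		}
	}, [loading, logEvent]);
}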
@@ -1,33 +1,24 @@
-import { PlusOutlined } from '@ant-design/icons';
-import { Select, Spin } from 'antd';
-import useComponentPermission from 'hooks/useComponentPermission';
+import { Select } from 'antd';
 import { State } from 'hooks/useFetch';
 import { useNotifications } from 'hooks/useNotifications';
 import { ReactNode } from 'react';
 import { useTranslation } from 'react-i18next';
-import { useSelector } from 'react-redux';
-import { AppState } from 'store/reducers';
 import { PayloadProps } from 'types/api/channels/getAll';
-import AppReducer from 'types/reducer/app';

-import { StyledCreateChannelOption, StyledSelect } from './styles';
+import { StyledSelect } from './styles';

 export interface ChannelSelectProps {
 	disabled?: boolean;
 	currentValue?: string[];
 	onSelectChannels: (s: string[]) => void;
-	onDropdownOpen: () => void;
 	channels: State<PayloadProps | undefined>;
-	handleCreateNewChannels: () => void;
 }

 function ChannelSelect({
 	disabled,
 	currentValue,
 	onSelectChannels,
-	onDropdownOpen,
 	channels,
-	handleCreateNewChannels,
 }: ChannelSelectProps): JSX.Element | null {
 	// init namespace for translations
 	const { t } = useTranslation('alerts');
@@ -35,10 +26,6 @@ function ChannelSelect({
 	const { notifications } = useNotifications();

 	const handleChange = (value: string[]): void => {
-		if (value.includes('add-new-channel')) {
-			handleCreateNewChannels();
-			return;
-		}
 		onSelectChannels(value);
 	};

@@ -48,27 +35,9 @@ function ChannelSelect({
 			description: channels.errorMessage,
 		});
 	}
-
-	const { role } = useSelector<AppState, AppReducer>((state) => state.app);
-	const [addNewChannelPermission] = useComponentPermission(
-		['add_new_channel'],
-		role,
-	);
-
 	const renderOptions = (): ReactNode[] => {
 		const children: ReactNode[] = [];

-		if (!channels.loading && addNewChannelPermission) {
-			children.push(
-				<Select.Option key="add-new-channel" value="add-new-channel">
-					<StyledCreateChannelOption>
-						<PlusOutlined />
-						Create a new channel
-					</StyledCreateChannelOption>
-				</Select.Option>,
-			);
-		}
-
 		if (
 			channels.loading ||
 			channels.payload === undefined ||
@@ -87,7 +56,6 @@ function ChannelSelect({
-
 		return children;
 	};

 	return (
 		<StyledSelect
 			disabled={disabled}
@@ -97,12 +65,6 @@ function ChannelSelect({
 			placeholder={t('placeholder_channel_select')}
 			data-testid="alert-channel-select"
 			value={currentValue}
-			notFoundContent={channels.loading && <Spin size="small" />}
-			onDropdownVisibleChange={(open): void => {
-				if (open) {
-					onDropdownOpen();
-				}
-			}}
 			onChange={(value): void => {
 				handleChange(value as string[]);
 			}}
@@ -4,10 +4,3 @@ import styled from 'styled-components';
 export const StyledSelect = styled(Select)`
 	border-radius: 4px;
 `;
-
-export const StyledCreateChannelOption = styled.div`
-	color: var(--bg-robin-500);
-	display: flex;
-	align-items: center;
-	gap: 8px;
-`;
@@ -25,7 +25,6 @@ import getTimeString from 'lib/getTimeString';
 import history from 'lib/history';
 import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
 import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
-import { useTimezone } from 'providers/Timezone';
 import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
 import { useTranslation } from 'react-i18next';
 import { useDispatch, useSelector } from 'react-redux';
@@ -36,7 +35,6 @@ import { AlertDef } from 'types/api/alerts/def';
 import { Query } from 'types/api/queryBuilder/queryBuilderData';
 import { EQueryType } from 'types/common/dashboard';
 import { GlobalReducer } from 'types/reducer/globalTime';
-import uPlot from 'uplot';
 import { getGraphType } from 'utils/getGraphType';
 import { getSortedSeriesData } from 'utils/getSortedSeriesData';
 import { getTimeRange } from 'utils/getTimeRange';
@@ -203,8 +201,6 @@ function ChartPreview({
 		[dispatch, location.pathname, urlQuery],
 	);

-	const { timezone } = useTimezone();
-
 	const options = useMemo(
 		() =>
 			getUPlotChartOptions({
@@ -240,9 +236,6 @@ function ChartPreview({
 				softMax: null,
 				softMin: null,
 				panelType: graphType,
-				tzDate: (timestamp: number) =>
-					uPlot.tzDate(new Date(timestamp * 1e3), timezone?.value),
-				timezone: timezone?.value,
 			}),
 		[
 			yAxisUnit,
@@ -257,7 +250,6 @@ function ChartPreview({
 			optionName,
 			alertDef?.condition.targetUnit,
 			graphType,
-			timezone?.value,
 		],
 	);

@@ -102,9 +102,9 @@ function RuleOptions({
 					<Select.Option value="4">{t('option_notequal')}</Select.Option>
 				</>
 			)}
-			{/* the value 5 and 6 are reserved for above or equal and below or equal */}
 			{ruleType === 'anomaly_rule' && (
-				<Select.Option value="7">{t('option_above_below')}</Select.Option>
+				<Select.Option value="5">{t('option_above_below')}</Select.Option>
 			)}
 		</InlineSelect>
 	);
@@ -4,7 +4,6 @@ import { Popover, Typography } from 'antd';
 import { convertTimeToRelevantUnit } from 'container/TraceDetail/utils';
 import dayjs from 'dayjs';
 import { useIsDarkMode } from 'hooks/useDarkMode';
-import { useTimezone } from 'providers/Timezone';
 import { useEffect } from 'react';
 import { toFixed } from 'utils/toFixed';

@@ -33,17 +32,13 @@ function Span(props: SpanLengthProps): JSX.Element {
 	const isDarkMode = useIsDarkMode();
 	const { time, timeUnitName } = convertTimeToRelevantUnit(inMsCount);

-	const { timezone } = useTimezone();
-
 	useEffect(() => {
 		document.documentElement.scrollTop = document.documentElement.clientHeight;
 		document.documentElement.scrollLeft = document.documentElement.clientWidth;
 	}, []);

 	const getContent = (): JSX.Element => {
-		const timeStamp = dayjs(startTime)
-			.tz(timezone.value)
-			.format('h:mm:ss:SSS A (UTC Z)');
+		const timeStamp = dayjs(startTime).format('h:mm:ss:SSS A');
 		const startTimeInMs = startTime - globalStart;
 		return (
 			<div>
@@ -18,13 +18,8 @@
 	font-style: normal;
 	font-weight: var(--font-weight-normal);
 	line-height: 28px;
+	/* 155.556% */
 	letter-spacing: -0.09px;
-
-	width: 72%; // arbitrary number to match input width
-	display: flex;
-	align-items: center;
-	gap: 8px;
-	justify-content: space-between;
 }

 .subtitle {
@@ -361,8 +356,6 @@
 	flex: 1;

 	.heading {
-		margin-bottom: 8px;
-
 		.title {
 			font-size: 12px;
 		}
@@ -377,18 +370,6 @@
 	.ant-input-number {
 		width: 80%;
 	}
-
-	.no-limit {
-		display: flex;
-		align-items: center;
-		gap: 8px;
-
-		margin-bottom: 24px;
-
-		font-weight: 700;
-		font-size: 12px;
-		color: var(--bg-forest-400);
-	}
 }

 .signal-limit-view-mode {
@@ -12,7 +12,6 @@ import {
 	Modal,
 	Row,
 	Select,
-	Switch,
 	Table,
 	TablePaginationConfig,
 	TableProps as AntDTableProps,
@@ -31,11 +30,11 @@ import { AxiosError } from 'axios';
 import { getYAxisFormattedValue } from 'components/Graph/yAxisConfig';
 import Tags from 'components/Tags/Tags';
 import { SOMETHING_WENT_WRONG } from 'constants/api';
-import dayjs from 'dayjs';
+import dayjs, { Dayjs } from 'dayjs';
 import { useGetAllIngestionsKeys } from 'hooks/IngestionKeys/useGetAllIngestionKeys';
 import useDebouncedFn from 'hooks/useDebouncedFunction';
 import { useNotifications } from 'hooks/useNotifications';
-import { isNil, isUndefined } from 'lodash-es';
+import { isNil } from 'lodash-es';
 import {
 	ArrowUpRight,
 	CalendarClock,
@@ -51,7 +50,6 @@ import {
 	Trash2,
 	X,
 } from 'lucide-react';
-import { useTimezone } from 'providers/Timezone';
 import { ChangeEvent, useEffect, useState } from 'react';
 import { useTranslation } from 'react-i18next';
 import { useMutation } from 'react-query';
@@ -71,10 +69,7 @@ const { Option } = Select;

 const BYTES = 1073741824;

-// Using any type here because antd's DatePicker expects its own internal Dayjs type
-// which conflicts with our project's Dayjs type that has additional plugins (tz, utc etc).
-// eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/explicit-module-boundary-types
-export const disabledDate = (current: any): boolean =>
+export const disabledDate = (current: Dayjs): boolean =>
 	// Disable all dates before today
 	current && current < dayjs().endOf('day');

@@ -397,11 +392,86 @@ function MultiIngestionSettings(): JSX.Element {

 	const gbToBytes = (gb: number): number => Math.round(gb * 1024 ** 3);

-	const getFormattedTime = (
-		date: string,
-		formatTimezoneAdjustedTimestamp: (date: string, format: string) => string,
-	): string =>
-		formatTimezoneAdjustedTimestamp(date, 'MMM DD,YYYY, hh:mm a (UTC Z)');
+	const getFormattedTime = (date: string): string =>
+		dayjs(date).format('MMM DD,YYYY, hh:mm a');
+
+	const handleAddLimit = (
+		APIKey: IngestionKeyProps,
+		signalName: string,
+	): void => {
+		setActiveSignal({
+			id: signalName,
+			signal: signalName,
+			config: {},
+		});
+
+		const { dailyLimit, secondsLimit } = addEditLimitForm.getFieldsValue();
+
+		const payload = {
+			keyID: APIKey.id,
+			signal: signalName,
+			config: {
+				day: {
+					size: gbToBytes(dailyLimit),
+				},
+				second: {
+					size: gbToBytes(secondsLimit),
+				},
+			},
+		};
+
+		createLimitForIngestionKey(payload);
+	};
+
+	const handleUpdateLimit = (
+		APIKey: IngestionKeyProps,
+		signal: LimitProps,
+	): void => {
+		setActiveSignal(signal);
+		const { dailyLimit, secondsLimit } = addEditLimitForm.getFieldsValue();
+		const payload = {
+			limitID: signal.id,
+			signal: signal.signal,
+			config: {
+				day: {
+					size: gbToBytes(dailyLimit),
+				},
+				second: {
+					size: gbToBytes(secondsLimit),
+				},
+			},
+		};
+		updateLimitForIngestionKey(payload);
+	};
+
+	const bytesToGb = (size: number | undefined): number => {
+		if (!size) {
+			return 0;
+		}
+
+		return size / BYTES;
+	};
+
+	const enableEditLimitMode = (
+		APIKey: IngestionKeyProps,
+		signal: LimitProps,
+	): void => {
+		setActiveAPIKey(APIKey);
+		setActiveSignal(signal);
+
+		addEditLimitForm.setFieldsValue({
+			dailyLimit: bytesToGb(signal?.config?.day?.size || 0),
+			secondsLimit: bytesToGb(signal?.config?.second?.size || 0),
+		});
+
+		setIsEditAddLimitOpen(true);
+	};
+
+	const onDeleteLimitHandler = (): void => {
+		if (activeSignal && activeSignal?.id) {
+			deleteLimitForKey(activeSignal.id);
+		}
+	};

 	const showDeleteLimitModal = (
 		APIKey: IngestionKeyProps,
@@ -426,152 +496,17 @@ function MultiIngestionSettings(): JSX.Element {
 		addEditLimitForm.resetFields();
 	};

-	const handleAddLimit = (
-		APIKey: IngestionKeyProps,
-		signalName: string,
-	): void => {
-		const { dailyLimit, secondsLimit } = addEditLimitForm.getFieldsValue();
-
-		const payload = {
-			keyID: APIKey.id,
-			signal: signalName,
-			config: {},
-		};
-
-		if (!isUndefined(dailyLimit)) {
-			payload.config = {
-				day: {
-					size: gbToBytes(dailyLimit),
-				},
-			};
-		}
-
-		if (!isUndefined(secondsLimit)) {
-			payload.config = {
-				...payload.config,
-				second: {
-					size: gbToBytes(secondsLimit),
-				},
-			};
-		}
-
-		if (isUndefined(dailyLimit) && isUndefined(secondsLimit)) {
-			// No need to save as no limit is provided, close the edit view and reset active signal and api key
-			setActiveSignal(null);
-			setActiveAPIKey(null);
-			setIsEditAddLimitOpen(false);
-			setUpdatedTags([]);
-			hideAddViewModal();
-			setHasCreateLimitForIngestionKeyError(false);
-
-			return;
-		}
-
-		createLimitForIngestionKey(payload);
-	};
-
-	const handleUpdateLimit = (
-		APIKey: IngestionKeyProps,
-		signal: LimitProps,
-	): void => {
-		const { dailyLimit, secondsLimit } = addEditLimitForm.getFieldsValue();
-		const payload = {
-			limitID: signal.id,
-			signal: signal.signal,
-			config: {},
-		};
-
-		if (isUndefined(dailyLimit) && isUndefined(secondsLimit)) {
-			showDeleteLimitModal(APIKey, signal);
-
-			return;
-		}
-
-		if (!isUndefined(dailyLimit)) {
-			payload.config = {
-				day: {
-					size: gbToBytes(dailyLimit),
-				},
-			};
-		}
-
-		if (!isUndefined(secondsLimit)) {
-			payload.config = {
-				...payload.config,
-				second: {
-					size: gbToBytes(secondsLimit),
-				},
-			};
-		}
-
-		updateLimitForIngestionKey(payload);
-	};
-
-	const bytesToGb = (size: number | undefined): number => {
-		if (!size) {
-			return 0;
-		}
-
-		return size / BYTES;
-	};
-
-	const enableEditLimitMode = (
-		APIKey: IngestionKeyProps,
-		signal: LimitProps,
-	): void => {
-		setActiveAPIKey(APIKey);
-		setActiveSignal({
-			...signal,
-			config: {
-				...signal.config,
-				day: {
-					...signal.config?.day,
-					enabled: !isNil(signal?.config?.day?.size),
-				},
-				second: {
-					...signal.config?.second,
-					enabled: !isNil(signal?.config?.second?.size),
-				},
-			},
-		});
-
-		addEditLimitForm.setFieldsValue({
-			dailyLimit: bytesToGb(signal?.config?.day?.size || 0),
-			secondsLimit: bytesToGb(signal?.config?.second?.size || 0),
-			enableDailyLimit: !isNil(signal?.config?.day?.size),
-			enableSecondLimit: !isNil(signal?.config?.second?.size),
-		});
-
-		setIsEditAddLimitOpen(true);
-	};
-
-	const onDeleteLimitHandler = (): void => {
-		if (activeSignal && activeSignal?.id) {
-			deleteLimitForKey(activeSignal.id);
-		}
-	};
-
-	const { formatTimezoneAdjustedTimestamp } = useTimezone();
-
 	const columns: AntDTableProps<IngestionKeyProps>['columns'] = [
 		{
 			title: 'Ingestion Key',
 			key: 'ingestion-key',
 			// eslint-disable-next-line sonarjs/cognitive-complexity
 			render: (APIKey: IngestionKeyProps): JSX.Element => {
-				const createdOn = getFormattedTime(
-					APIKey.created_at,
-					formatTimezoneAdjustedTimestamp,
-				);
+				const createdOn = getFormattedTime(APIKey.created_at);
 				const formattedDateAndTime =
-					APIKey &&
-					APIKey?.expires_at &&
-					getFormattedTime(APIKey?.expires_at, formatTimezoneAdjustedTimestamp);
+					APIKey && APIKey?.expires_at && getFormattedTime(APIKey?.expires_at);

-				const updatedOn = getFormattedTime(
-					APIKey?.updated_at,
-					formatTimezoneAdjustedTimestamp,
-				);
+				const updatedOn = getFormattedTime(APIKey?.updated_at);

 				const limits: { [key: string]: LimitProps } = {};

@@ -749,108 +684,50 @@ function MultiIngestionSettings(): JSX.Element {
 							<div className="signal-limit-edit-mode">
 								<div className="daily-limit">
 									<div className="heading">
-										<div className="title">
-											Daily limit
-											<div className="limit-enable-disable-toggle">
-												<Form.Item name="enableDailyLimit">
-													<Switch
-														size="small"
-														checked={activeSignal?.config?.day?.enabled}
-														onChange={(value): void => {
-															setActiveSignal({
-																...activeSignal,
-																config: {
-																	...activeSignal.config,
-																	day: {
-																		...activeSignal.config?.day,
-																		enabled: value,
-																	},
-																},
-															});
-														}}
-													/>
-												</Form.Item>
-											</div>
-										</div>
+										<div className="title"> Daily limit </div>
 										<div className="subtitle">
-											Add a limit for data ingested daily
+											Add a limit for data ingested daily{' '}
 										</div>
 									</div>

 									<div className="size">
-										{activeSignal?.config?.day?.enabled ? (
-											<Form.Item name="dailyLimit" key="dailyLimit">
-												<InputNumber
-													disabled={!activeSignal?.config?.day?.enabled}
-													key="dailyLimit"
-													addonAfter={
-														<Select defaultValue="GiB" disabled>
-															<Option value="TiB"> TiB</Option>
-															<Option value="GiB"> GiB</Option>
-															<Option value="MiB"> MiB </Option>
-															<Option value="KiB"> KiB </Option>
-														</Select>
-													}
-												/>
-											</Form.Item>
-										) : (
-											<div className="no-limit">
-												<Infinity size={16} /> NO LIMIT
-											</div>
-										)}
+										<Form.Item name="dailyLimit">
+											<InputNumber
+												addonAfter={
+													<Select defaultValue="GiB" disabled>
+														<Option value="TiB"> TiB</Option>
+														<Option value="GiB"> GiB</Option>
+														<Option value="MiB"> MiB </Option>
+														<Option value="KiB"> KiB </Option>
+													</Select>
+												}
+											/>
+										</Form.Item>
 									</div>
 								</div>

 								<div className="second-limit">
 									<div className="heading">
-										<div className="title">
-											Per Second limit{' '}
-											<div className="limit-enable-disable-toggle">
-												<Form.Item name="enableSecondLimit">
-													<Switch
-														size="small"
-														checked={activeSignal?.config?.second?.enabled}
-														onChange={(value): void => {
-															setActiveSignal({
-																...activeSignal,
-																config: {
-																	...activeSignal.config,
-																	second: {
-																		...activeSignal.config?.second,
-																		enabled: value,
-																	},
-																},
-															});
-														}}
-													/>
-												</Form.Item>
-											</div>
-										</div>
+										<div className="title"> Per Second limit </div>
 										<div className="subtitle">
-											Add a limit for data ingested every second
+											{' '}
+											Add a limit for data ingested every second{' '}
 										</div>
 									</div>

 									<div className="size">
-										{activeSignal?.config?.second?.enabled ? (
-											<Form.Item name="secondsLimit" key="secondsLimit">
-												<InputNumber
-													key="secondsLimit"
-													disabled={!activeSignal?.config?.second?.enabled}
-													addonAfter={
-														<Select defaultValue="GiB" disabled>
-															<Option value="TiB"> TiB</Option>
-															<Option value="GiB"> GiB</Option>
-															<Option value="MiB"> MiB </Option>
-															<Option value="KiB"> KiB </Option>
-														</Select>
-													}
-												/>
-											</Form.Item>
-										) : (
-											<div className="no-limit">
-												<Infinity size={16} /> NO LIMIT
-											</div>
-										)}
+										<Form.Item name="secondsLimit">
+											<InputNumber
+												addonAfter={
+													<Select defaultValue="GiB" disabled>
+														<Option value="TiB"> TiB</Option>
+														<Option value="GiB"> GiB</Option>
+														<Option value="MiB"> MiB </Option>
+														<Option value="KiB"> KiB </Option>
+													</Select>
+												}
+											/>
+										</Form.Item>
 									</div>
 								</div>
 							</div>
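A note on the limit arithmetic above: `BYTES` is 1073741824 = 1024³, so `gbToBytes` and `bytesToGb` are inverse GiB conversions (binary gigabytes, matching the GiB unit shown in the selector). A quick round-trip check of the two helpers as they appear in this file:

const BYTES = 1073741824; // 1024 ** 3, i.e. one GiB in bytes

const gbToBytes = (gb: number): number => Math.round(gb * 1024 ** 3);
const bytesToGb = (size: number | undefined): number => (size ? size / BYTES : 0);

console.log(gbToBytes(2)); // 2147483648
console.log(bytesToGb(2147483648)); // 2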
@@ -1,20 +1,8 @@
-import { Typography } from 'antd';
 import { ColumnsType } from 'antd/lib/table';
 import { ResizeTable } from 'components/ResizeTable';
-import { useTimezone } from 'providers/Timezone';
 import { useTranslation } from 'react-i18next';
 import { License } from 'types/api/licenses/def';

-function ValidityColumn({ value }: { value: string }): JSX.Element {
-	const { formatTimezoneAdjustedTimestamp } = useTimezone();
-
-	return (
-		<Typography>
-			{formatTimezoneAdjustedTimestamp(value, 'YYYY-MM-DD HH:mm:ss (UTC Z)')}
-		</Typography>
-	);
-}
-
 function ListLicenses({ licenses }: ListLicensesProps): JSX.Element {
 	const { t } = useTranslation(['licenses']);

@@ -35,14 +23,12 @@ function ListLicenses({ licenses }: ListLicensesProps): JSX.Element {
 			title: t('column_valid_from'),
 			dataIndex: 'ValidFrom',
 			key: 'valid from',
-			render: (value: string): JSX.Element => ValidityColumn({ value }),
 			width: 80,
 		},
 		{
 			title: t('column_valid_until'),
 			dataIndex: 'ValidUntil',
 			key: 'valid until',
-			render: (value: string): JSX.Element => ValidityColumn({ value }),
 			width: 80,
 		},
 	];
@@ -57,7 +57,6 @@ import {
 // see more: https://github.com/lucide-icons/lucide/issues/94
 import { handleContactSupport } from 'pages/Integrations/utils';
 import { useDashboard } from 'providers/Dashboard/Dashboard';
-import { useTimezone } from 'providers/Timezone';
 import {
 	ChangeEvent,
 	Key,
@@ -344,13 +343,31 @@ function DashboardsList(): JSX.Element {
 		}
 	}, [state.error, state.value, t, notifications]);

-	const { formatTimezoneAdjustedTimestamp } = useTimezone();
-
 	function getFormattedTime(dashboard: Dashboard, option: string): string {
-		return formatTimezoneAdjustedTimestamp(
-			get(dashboard, option, ''),
-			'MMM D, YYYY ⎯ HH:mm:ss',
+		const timeOptions: Intl.DateTimeFormatOptions = {
+			hour: '2-digit',
+			minute: '2-digit',
+			second: '2-digit',
+			hour12: false,
+		};
+		const formattedTime = new Date(get(dashboard, option, '')).toLocaleTimeString(
+			'en-US',
+			timeOptions,
 		);
+
+		const dateOptions: Intl.DateTimeFormatOptions = {
+			month: 'short',
+			day: 'numeric',
+			year: 'numeric',
+		};
+
+		const formattedDate = new Date(get(dashboard, option, '')).toLocaleDateString(
+			'en-US',
+			dateOptions,
+		);
+
+		// Combine time and date
+		return `${formattedDate} ⎯ ${formattedTime}`;
 	}

 	const onLastUpdated = (time: string): string => {
@@ -393,11 +410,31 @@ function DashboardsList(): JSX.Element {
 			title: 'Dashboards',
 			key: 'dashboard',
 			render: (dashboard: Data, _, index): JSX.Element => {
-				const formattedDateAndTime = formatTimezoneAdjustedTimestamp(
-					dashboard.createdAt,
-					'MMM D, YYYY ⎯ HH:mm:ss',
+				const timeOptions: Intl.DateTimeFormatOptions = {
+					hour: '2-digit',
+					minute: '2-digit',
+					second: '2-digit',
+					hour12: false,
+				};
+				const formattedTime = new Date(dashboard.createdAt).toLocaleTimeString(
+					'en-US',
+					timeOptions,
 				);
+
+				const dateOptions: Intl.DateTimeFormatOptions = {
+					month: 'short',
+					day: 'numeric',
+					year: 'numeric',
+				};
+
+				const formattedDate = new Date(dashboard.createdAt).toLocaleDateString(
+					'en-US',
+					dateOptions,
+				);
+
+				// Combine time and date
+				const formattedDateAndTime = `${formattedDate} ⎯ ${formattedTime}`;

 				const getLink = (): string => `${ROUTES.ALL_DASHBOARD}/${dashboard.id}`;

 				const onClickHandler = (event: React.MouseEvent<HTMLElement>): void => {
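The replacement formatting above leans on the built-in Intl machinery behind `toLocaleTimeString`/`toLocaleDateString`, which always renders in the browser's local zone — that is the behavioural difference from the removed timezone-aware hook. A standalone sketch of the same combination (the sample date is arbitrary):

const createdAt = new Date('2024-11-19T10:25:36Z'); // arbitrary sample value

const formattedTime = createdAt.toLocaleTimeString('en-US', {
	hour: '2-digit',
	minute: '2-digit',
	second: '2-digit',
	hour12: false,
});

const formattedDate = createdAt.toLocaleDateString('en-US', {
	month: 'short',
	day: 'numeric',
	year: 'numeric',
});

console.log(`${formattedDate} ⎯ ${formattedTime}`); // e.g. 'Nov 19, 2024 ⎯ 10:25:36' in a UTC browser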
@@ -82,8 +82,9 @@ function ImportJSON({

 	const dashboardData = JSON.parse(editorValue) as DashboardData;

-	// Remove uuid from the dashboard data, in all cases - empty, duplicate or any valid not duplicate uuid
-	if (dashboardData.uuid !== undefined) {
+	// Add validation for uuid
+	if (dashboardData.uuid !== undefined && dashboardData.uuid.trim() === '') {
+		// silently remove uuid if it is empty
 		delete dashboardData.uuid;
 	}

@@ -8,12 +8,10 @@ import { useResizeObserver } from 'hooks/useDimensions';
 import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
 import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
 import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
-import { useTimezone } from 'providers/Timezone';
 import { useMemo, useRef } from 'react';
 import { useQueries, UseQueryResult } from 'react-query';
 import { SuccessResponse } from 'types/api';
 import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
-import uPlot from 'uplot';

 import {
 	getHostQueryPayload,
@@ -75,8 +73,6 @@ function NodeMetrics({
 		[queries],
 	);

-	const { timezone } = useTimezone();
-
 	const options = useMemo(
 		() =>
 			queries.map(({ data }, idx) =>
@@ -90,9 +86,6 @@ function NodeMetrics({
 					minTimeScale: start,
 					maxTimeScale: end,
 					verticalLineTimestamp,
-					tzDate: (timestamp: number) =>
-						uPlot.tzDate(new Date(timestamp * 1e3), timezone?.value),
-					timezone: timezone?.value,
 				}),
 			),
 		[
@@ -103,7 +96,6 @@ function NodeMetrics({
 			start,
 			verticalLineTimestamp,
 			end,
-			timezone?.value,
 		],
 	);

|
|||||||
@@ -8,12 +8,10 @@ import { useResizeObserver } from 'hooks/useDimensions';
 import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
 import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
 import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
-import { useTimezone } from 'providers/Timezone';
 import { useMemo, useRef } from 'react';
 import { useQueries, UseQueryResult } from 'react-query';
 import { SuccessResponse } from 'types/api';
 import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
-import uPlot from 'uplot';

 import { getPodQueryPayload, podWidgetInfo } from './constants';

@@ -62,7 +60,6 @@ function PodMetrics({
     () => queries.map(({ data }) => getUPlotChartData(data?.payload)),
     [queries],
   );
-  const { timezone } = useTimezone();

   const options = useMemo(
     () =>
@@ -77,20 +74,9 @@ function PodMetrics({
           minTimeScale: start,
           maxTimeScale: end,
           verticalLineTimestamp,
-          tzDate: (timestamp: number) =>
-            uPlot.tzDate(new Date(timestamp * 1e3), timezone?.value),
-          timezone: timezone?.value,
         }),
       ),
-    [
-      queries,
-      isDarkMode,
-      dimensions,
-      start,
-      end,
-      verticalLineTimestamp,
-      timezone?.value,
-    ],
+    [queries, isDarkMode, dimensions, start, verticalLineTimestamp, end],
   );

   const renderCardContent = (
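Note: the tzDate option stripped from these chart components is uPlot's documented hook for axis timezone handling. A sketch of the wiring being removed, with an assumed IANA zone name standing in for the user's selection:

import uPlot from 'uplot';

// uPlot builds x-axis labels through tzDate; returning a Date re-expressed in
// the given zone re-labels the axis without changing the underlying epoch data.
const tzDate = (timestamp: number): Date =>
  uPlot.tzDate(new Date(timestamp * 1e3), 'Asia/Kolkata');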
@@ -11,8 +11,7 @@ import ROUTES from 'constants/routes';
 import dompurify from 'dompurify';
 import { isEmpty } from 'lodash-es';
 import { ArrowDownToDot, ArrowUpFromDot, Ellipsis } from 'lucide-react';
-import { useTimezone } from 'providers/Timezone';
-import React, { useMemo, useState } from 'react';
+import { useMemo, useState } from 'react';
 import { useLocation } from 'react-router-dom';
 import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
 import { FORBID_DOM_PURIFY_TAGS } from 'utils/app';
@@ -69,8 +68,6 @@ export function TableViewActions(

   const [isOpen, setIsOpen] = useState<boolean>(false);

-  const { formatTimezoneAdjustedTimestamp } = useTimezone();
-
   if (record.field === 'body') {
     const parsedBody = recursiveParseJSON(fieldData.value);
     if (!isEmpty(parsedBody)) {
@@ -103,44 +100,33 @@ export function TableViewActions(
     );
   }

-  let cleanTimestamp: string;
-  if (record.field === 'timestamp') {
-    cleanTimestamp = fieldData.value.replace(/^["']|["']$/g, '');
-  }
-
-  const renderFieldContent = (): JSX.Element => {
-    const commonStyles: React.CSSProperties = {
-      color: Color.BG_SIENNA_400,
-      whiteSpace: 'pre-wrap',
-      tabSize: 4,
-    };
-
-    switch (record.field) {
-      case 'body':
-        return <span style={commonStyles} dangerouslySetInnerHTML={bodyHtml} />;
-
-      case 'timestamp':
-        return (
-          <span style={commonStyles}>
-            {formatTimezoneAdjustedTimestamp(
-              cleanTimestamp,
-              'MM/DD/YYYY, HH:mm:ss.SSS (UTC Z)',
-            )}
-          </span>
-        );
-
-      default:
-        return (
-          <span style={commonStyles}>{removeEscapeCharacters(fieldData.value)}</span>
-        );
-    }
-  };
-
   return (
     <div className={cx('value-field', isOpen ? 'open-popover' : '')}>
-      <CopyClipboardHOC entityKey={fieldFilterKey} textToCopy={textToCopy}>
-        {renderFieldContent()}
-      </CopyClipboardHOC>
+      {record.field === 'body' ? (
+        <CopyClipboardHOC entityKey={fieldFilterKey} textToCopy={textToCopy}>
+          <span
+            style={{
+              color: Color.BG_SIENNA_400,
+              whiteSpace: 'pre-wrap',
+              tabSize: 4,
+            }}
+            dangerouslySetInnerHTML={bodyHtml}
+          />
+        </CopyClipboardHOC>
+      ) : (
+        <CopyClipboardHOC entityKey={fieldFilterKey} textToCopy={textToCopy}>
+          <span
+            style={{
+              color: Color.BG_SIENNA_400,
+              whiteSpace: 'pre-wrap',
+              tabSize: 4,
+            }}
+          >
+            {removeEscapeCharacters(fieldData.value)}
+          </span>
+        </CopyClipboardHOC>
+      )}

       {!isListViewPanel && (
         <span className="action-btn">
           <Tooltip title="Filter for value">
@@ -202,7 +202,6 @@ function LogsExplorerViews({
           id: 'severity_text--string----true',
         },
       ],
-      legend: '{{severity_text}}',
     };

     const modifiedQuery: Query = {
@@ -15,7 +15,6 @@ import { useLogsData } from 'hooks/useLogsData';
 import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
 import { FlatLogData } from 'lib/logs/flatLogData';
 import { RowData } from 'lib/query/createTableColumnsFromQuery';
-import { useTimezone } from 'providers/Timezone';
 import {
   Dispatch,
   HTMLAttributes,
@@ -77,12 +76,7 @@ function LogsPanelComponent({
     });
   };

-  const { formatTimezoneAdjustedTimestamp } = useTimezone();
-
-  const columns = getLogPanelColumnsList(
-    widget.selectedLogFields,
-    formatTimezoneAdjustedTimestamp,
-  );
+  const columns = getLogPanelColumnsList(widget.selectedLogFields);

   const dataLength =
     queryResponse.data?.payload?.data?.newResult?.data?.result[0]?.list?.length;
@@ -1,7 +1,6 @@
 import { ColumnsType } from 'antd/es/table';
 import { Typography } from 'antd/lib';
 import { OPERATORS } from 'constants/queryBuilder';
-import { TimestampInput } from 'hooks/useTimezoneFormatter/useTimezoneFormatter';
 // import Typography from 'antd/es/typography/Typography';
 import { RowData } from 'lib/query/createTableColumnsFromQuery';
 import { ReactNode } from 'react';
@@ -14,31 +13,18 @@ import { v4 as uuid } from 'uuid';

 export const getLogPanelColumnsList = (
   selectedLogFields: Widgets['selectedLogFields'],
-  formatTimezoneAdjustedTimestamp: (
-    input: TimestampInput,
-    format?: string,
-  ) => string,
 ): ColumnsType<RowData> => {
   const initialColumns: ColumnsType<RowData> = [];

   const columns: ColumnsType<RowData> =
     selectedLogFields?.map((field: IField) => {
       const { name } = field;

       return {
         title: name,
         dataIndex: name,
         key: name,
         width: name === 'body' ? 350 : 100,
         render: (value: ReactNode): JSX.Element => {
-          if (name === 'timestamp') {
-            return (
-              <Typography.Text>
-                {formatTimezoneAdjustedTimestamp(value as string)}
-              </Typography.Text>
-            );
-          }
-
           if (name === 'body') {
             return (
               <Typography.Paragraph ellipsis={{ rows: 1 }} data-testid={name}>
@@ -58,17 +58,12 @@ export const databaseCallsRPS = ({
   const legends = [legend];
   const dataSource = DataSource.METRICS;

-  const timeAggregateOperators = [MetricAggregateOperator.RATE];
-  const spaceAggregateOperators = [MetricAggregateOperator.SUM];
-
   return getQueryBuilderQueries({
     autocompleteData,
     groupBy,
     legends,
     filterItems,
     dataSource,
-    timeAggregateOperators,
-    spaceAggregateOperators,
   });
 };

@@ -213,17 +213,12 @@ export const externalCallRpsByAddress = ({
   const legends = [legend];
   const dataSource = DataSource.METRICS;

-  const timeAggregateOperators = [MetricAggregateOperator.RATE];
-  const spaceAggregateOperators = [MetricAggregateOperator.SUM];
-
   return getQueryBuilderQueries({
     autocompleteData,
     groupBy,
     legends,
     filterItems,
     dataSource,
-    timeAggregateOperators,
-    spaceAggregateOperators,
   });
 };
@@ -25,8 +25,6 @@ export const getQueryBuilderQueries = ({
   aggregateOperator,
   dataSource,
   queryNameAndExpression,
-  timeAggregateOperators,
-  spaceAggregateOperators,
 }: BuilderQueriesProps): QueryBuilderData => ({
   queryFormulas: [],
   queryData: autocompleteData.map((item, index) => {
@@ -52,8 +50,6 @@ export const getQueryBuilderQueries = ({
         op: 'AND',
       },
       reduceTo: 'avg',
-      spaceAggregation: spaceAggregateOperators[index],
-      timeAggregation: timeAggregateOperators[index],
       dataSource,
     };
@@ -83,17 +83,6 @@ export const latency = ({
   const dataSource = isSpanMetricEnable ? DataSource.METRICS : DataSource.TRACES;
   const queryNameAndExpression = QUERYNAME_AND_EXPRESSION;

-  const timeAggregateOperators = [
-    MetricAggregateOperator.EMPTY,
-    MetricAggregateOperator.EMPTY,
-    MetricAggregateOperator.EMPTY,
-  ];
-  const spaceAggregateOperators = [
-    MetricAggregateOperator.P50,
-    MetricAggregateOperator.P90,
-    MetricAggregateOperator.P99,
-  ];
-
   return getQueryBuilderQueries({
     autocompleteData,
     legends,
@@ -101,8 +90,6 @@ export const latency = ({
     aggregateOperator,
     dataSource,
     queryNameAndExpression,
-    timeAggregateOperators,
-    spaceAggregateOperators,
   });
 };
@@ -523,16 +510,11 @@ export const operationPerSec = ({
   const legends = OPERATION_LEGENDS;
   const dataSource = DataSource.METRICS;

-  const timeAggregateOperators = [MetricAggregateOperator.RATE];
-  const spaceAggregateOperators = [MetricAggregateOperator.SUM];
-
   return getQueryBuilderQueries({
     autocompleteData,
     legends,
     filterItems,
     dataSource,
-    timeAggregateOperators,
-    spaceAggregateOperators,
   });
 };
@@ -29,8 +29,6 @@ export interface BuilderQueriesProps {
   aggregateOperator?: string[];
   dataSource: DataSource;
   queryNameAndExpression?: string[];
-  timeAggregateOperators: MetricAggregateOperator[];
-  spaceAggregateOperators: MetricAggregateOperator[];
 }

 export interface BuilderQuerieswithFormulaProps {
@@ -2,27 +2,18 @@

 import { DownloadOptions } from 'container/Download/Download.types';
 import { MenuItemKeys } from 'container/GridCardLayout/WidgetHeader/contants';
-import {
-  MetricAggregateOperator,
-  TracesAggregatorOperator,
-} from 'types/common/queryBuilder';
-
 export const legend = {
   address: '{{address}}',
 };

 export const QUERYNAME_AND_EXPRESSION = ['A', 'B', 'C'];
-export const LATENCY_AGGREGATEOPERATOR = [
-  TracesAggregatorOperator.P50,
-  TracesAggregatorOperator.P90,
-  TracesAggregatorOperator.P99,
-];
+export const LATENCY_AGGREGATEOPERATOR = ['p50', 'p90', 'p99'];
 export const LATENCY_AGGREGATEOPERATOR_SPAN_METRICS = [
-  MetricAggregateOperator.P50,
-  MetricAggregateOperator.P90,
-  MetricAggregateOperator.P99,
+  'hist_quantile_50',
+  'hist_quantile_90',
+  'hist_quantile_99',
 ];

 export const OPERATION_LEGENDS = ['Operations'];

 export const MENU_ITEMS = [MenuItemKeys.View, MenuItemKeys.CreateAlerts];
@@ -30,21 +21,8 @@ export const MENU_ITEMS = [MenuItemKeys.View, MenuItemKeys.CreateAlerts];
 export enum FORMULA {
   ERROR_PERCENTAGE = 'A*100/B',
   DATABASE_CALLS_AVG_DURATION = 'A/B',
-  // The apdex formula is (satisfied_count + 0.5 * tolerating_count + 0 * frustating_count) / total_count
-  // The satisfied_count is B, tolerating_count is C, total_count is A
-  // But why do we have (B+C)/2 instead of B + C/2?
-  // The way we issue the query is latency <= threshold, which means we over count i.e
-  // query B => durationNano <= 500ms
-  // query C => durationNano <= 2000ms
-  // Since <= 2000ms includes <= 500ms, we over count, to correct we subtract B/2
-  // so the full expression would be (B + C/2) - B/2 = (B+C)/2
   APDEX_TRACES = '((B + C)/2)/A',
-  // Does the same not apply for delta span metrics?
-  // No, because the delta metrics store the counts just for the current bucket
-  // so we don't need to subtract anything
-  APDEX_DELTA_SPAN_METRICS = '(B + C)/A',
-  // Cumulative span metrics store the counts for all buckets
-  // so we need to subtract B/2 to correct the over counting
+  APDEX_DELTA_SPAN_METRICS = '((B + C)/2)/A',
   APDEX_CUMULATIVE_SPAN_METRICS = '((B + C)/2)/A',
 }
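Note: the comments removed above derive the apdex expressions from overlapping threshold queries. A quick numeric check with assumed counts (A, B, C are placeholders, not real query results) confirms the algebra behind ((B + C)/2)/A:

const A = 1000; // total spans
const B = 800; // spans with latency <= T (satisfied; also included in C)
const C = 950; // spans with latency <= 4T (satisfied + tolerating, cumulative)

// Textbook apdex: (satisfied + tolerating / 2) / total, with tolerating = C - B.
const apdexTextbook = (B + (C - B) / 2) / A; // (800 + 75) / 1000 = 0.875

// Query form from the removed comment: since C already contains B, halving
// the sum (B + C) / 2 subtracts the double-counted B / 2 back out.
const apdexFromQueries = (B + C) / 2 / A; // (1750 / 2) / 1000 = 0.875

console.assert(apdexTextbook === apdexFromQueries);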
@@ -1,81 +0,0 @@
-.timezone-adaption {
-  padding: 16px;
-  background: var(--bg-ink-400);
-  border: 1px solid var(--bg-ink-500);
-  border-radius: 4px;
-
-  &__header {
-    display: flex;
-    justify-content: space-between;
-    align-items: center;
-    margin-bottom: 8px;
-  }
-
-  &__title {
-    color: var(--bg-vanilla-300);
-    font-size: 14px;
-    font-weight: 500;
-    margin: 0;
-  }
-
-  &__description {
-    color: var(--bg-vanilla-400);
-    font-size: 14px;
-    line-height: 20px;
-    margin: 0 0 12px 0;
-  }
-
-  &__note {
-    display: flex;
-    align-items: center;
-    justify-content: space-between;
-    padding: 7.5px 12px;
-    background: rgba(78, 116, 248, 0.1);
-    border: 1px solid rgba(78, 116, 248, 0.1);
-    border-radius: 4px;
-  }
-
-  &__bullet {
-    color: var(--bg-robin-400);
-    font-size: 16px;
-    line-height: 20px;
-  }
-
-  &__note-text-container {
-    display: flex;
-    align-items: center;
-    gap: 10px;
-  }
-
-  &__note-text {
-    display: flex;
-    align-items: center;
-    gap: 4px;
-    color: var(--bg-robin-400);
-    font-size: 14px;
-    line-height: 20px;
-  }
-  &__note-text-overridden {
-    display: flex;
-    align-items: center;
-    padding: 0 2px;
-    background: rgba(171, 189, 255, 0.04);
-    border-radius: 2px;
-    font-size: 12px;
-    line-height: 16px;
-    color: var(--bg-vanilla-100);
-  }
-  &__clear-override {
-    display: flex;
-    align-items: center;
-    gap: 6px;
-    background: transparent;
-    border: none;
-    padding: 0;
-    color: var(--bg-robin-300);
-    font-size: 12px;
-    line-height: 16px; /* 133.333% */
-    letter-spacing: 0.12px;
-    cursor: pointer;
-  }
-}
@@ -1,78 +0,0 @@
-import './TimezoneAdaptation.styles.scss';
-
-import { Color } from '@signozhq/design-tokens';
-import { Switch } from 'antd';
-import { Delete } from 'lucide-react';
-import { useTimezone } from 'providers/Timezone';
-import { useMemo, useState } from 'react';
-
-function TimezoneAdaptation(): JSX.Element {
-  const { timezone, browserTimezone, updateTimezone } = useTimezone();
-
-  const isTimezoneOverridden = useMemo(
-    () => timezone?.offset !== browserTimezone.offset,
-    [timezone, browserTimezone],
-  );
-
-  const [isAdaptationEnabled, setIsAdaptationEnabled] = useState(true);
-
-  const getSwitchStyles = (): React.CSSProperties => ({
-    backgroundColor:
-      isAdaptationEnabled && isTimezoneOverridden ? Color.BG_AMBER_400 : undefined,
-  });
-
-  const handleOverrideClear = (): void => {
-    updateTimezone(browserTimezone);
-  };
-
-  return (
-    <div className="timezone-adaption">
-      <div className="timezone-adaption__header">
-        <h2 className="timezone-adaption__title">Adapt to my timezone</h2>
-        <Switch
-          checked={isAdaptationEnabled}
-          onChange={setIsAdaptationEnabled}
-          style={getSwitchStyles()}
-        />
-      </div>
-
-      <p className="timezone-adaption__description">
-        Adapt the timestamps shown in the SigNoz console to my active timezone.
-      </p>
-
-      <div className="timezone-adaption__note">
-        <div className="timezone-adaption__note-text-container">
-          <span className="timezone-adaption__bullet">•</span>
-          <span className="timezone-adaption__note-text">
-            {isTimezoneOverridden ? (
-              <>
-                Your current timezone is overridden to
-                <span className="timezone-adaption__note-text-overridden">
-                  {timezone?.offset}
-                </span>
-              </>
-            ) : (
-              <>
-                You can override the timezone adaption for any view with the time
-                picker.
-              </>
-            )}
-          </span>
-        </div>
-
-        {!!isTimezoneOverridden && (
-          <button
-            type="button"
-            className="timezone-adaption__clear-override"
-            onClick={handleOverrideClear}
-          >
-            <Delete height={12} width={12} color={Color.BG_ROBIN_300} />
-            Clear override
-          </button>
-        )}
-      </div>
-    </div>
-  );
-}
-
-export default TimezoneAdaptation;
@@ -7,7 +7,6 @@ import { LogOut, Moon, Sun } from 'lucide-react';
 import { useState } from 'react';

 import Password from './Password';
-import TimezoneAdaptation from './TimezoneAdaptation/TimezoneAdaptation';
 import UserInfo from './UserInfo';

 function MySettings(): JSX.Element {
@@ -79,8 +78,6 @@ function MySettings(): JSX.Element {
         <Password />
       </div>

-      <TimezoneAdaptation />
-
       <Button
         className="flexBtn"
         onClick={(): void => Logout()}
@@ -14,9 +14,7 @@ import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
 import { cloneDeep, isEqual, isUndefined } from 'lodash-es';
 import _noop from 'lodash-es/noop';
 import { useDashboard } from 'providers/Dashboard/Dashboard';
-import { useTimezone } from 'providers/Timezone';
 import { useEffect, useMemo, useRef, useState } from 'react';
-import uPlot from 'uplot';
 import { getSortedSeriesData } from 'utils/getSortedSeriesData';
 import { getTimeRange } from 'utils/getTimeRange';

@@ -107,8 +105,6 @@ function UplotPanelWrapper({
     }
   }, [graphVisibility, hiddenGraph, widget.panelTypes, widget?.stackedBarChart]);

-  const { timezone } = useTimezone();
-
   const options = useMemo(
     () =>
       getUPlotChartOptions({
@@ -132,9 +128,6 @@ function UplotPanelWrapper({
         hiddenGraph,
         setHiddenGraph,
         customTooltipElement,
-        tzDate: (timestamp: number) =>
-          uPlot.tzDate(new Date(timestamp * 1e3), timezone?.value),
-        timezone: timezone?.value,
       }),
     [
       widget?.id,
@@ -157,7 +150,6 @@ function UplotPanelWrapper({
       currentQuery,
       hiddenGraph,
       customTooltipElement,
-      timezone?.value,
     ],
   );
@@ -1,14 +1,8 @@
-import { useTimezone } from 'providers/Timezone';
+import dayjs from 'dayjs';

 function DeploymentTime(deployTime: string): JSX.Element {
-  const { formatTimezoneAdjustedTimestamp } = useTimezone();
   return (
-    <span>
-      {formatTimezoneAdjustedTimestamp(
-        deployTime,
-        'MMMM DD, YYYY hh:mm A (UTC Z)',
-      )}{' '}
-    </span>
+    <span>{dayjs(deployTime).locale('en').format('MMMM DD, YYYY hh:mm A')}</span>
   );
 }
@@ -3,8 +3,8 @@ import './styles.scss';
 import { ExpandAltOutlined } from '@ant-design/icons';
 import LogDetail from 'components/LogDetail';
 import { VIEW_TYPES } from 'components/LogDetail/constants';
+import dayjs from 'dayjs';
 import { useActiveLog } from 'hooks/logs/useActiveLog';
-import { useTimezone } from 'providers/Timezone';
 import { ILog } from 'types/api/logs/log';

 function LogsList({ logs }: LogsListProps): JSX.Element {
@@ -18,17 +18,12 @@ function LogsList({ logs }: LogsListProps): JSX.Element {

   const makeLogDetailsHandler = (log: ILog) => (): void => onSetActiveLog(log);

-  const { formatTimezoneAdjustedTimestamp } = useTimezone();
-
   return (
     <div className="logs-preview-list-container">
       {logs.map((log) => (
         <div key={log.id} className="logs-preview-list-item">
           <div className="logs-preview-list-item-timestamp">
-            {formatTimezoneAdjustedTimestamp(
-              log.timestamp,
-              'MMM DD HH:mm:ss.SSS (UTC Z)',
-            )}
+            {dayjs(log.timestamp).format('MMM DD HH:mm:ss.SSS')}
           </div>
           <div className="logs-preview-list-item-body">{log.body}</div>
           <div
@@ -1,4 +1,4 @@
-import { useTimezone } from 'providers/Timezone';
+import dayjs from 'dayjs';
 import React from 'react';
 import { PipelineData, ProcessorData } from 'types/api/pipeline/def';

@@ -6,18 +6,13 @@ import { PipelineIndexIcon } from '../AddNewProcessor/styles';
 import { ColumnDataStyle, ListDataStyle, ProcessorIndexIcon } from '../styles';
 import PipelineFilterSummary from './PipelineFilterSummary';

-function CreatedAtComponent({ record }: { record: Record }): JSX.Element {
-  const { formatTimezoneAdjustedTimestamp } = useTimezone();
-  return (
-    <ColumnDataStyle>
-      {formatTimezoneAdjustedTimestamp(record, 'MMMM DD, YYYY hh:mm A (UTC Z)')}
-    </ColumnDataStyle>
-  );
-}
-
 const componentMap: ComponentMap = {
   orderId: ({ record }) => <PipelineIndexIcon>{record}</PipelineIndexIcon>,
-  createdAt: ({ record }) => <CreatedAtComponent record={record} />,
+  createdAt: ({ record }) => (
+    <ColumnDataStyle>
+      {dayjs(record).locale('en').format('MMMM DD, YYYY hh:mm A')}
+    </ColumnDataStyle>
+  ),
   id: ({ record }) => <ProcessorIndexIcon>{record}</ProcessorIndexIcon>,
   name: ({ record }) => <ListDataStyle>{record}</ListDataStyle>,
   filter: ({ record }) => <PipelineFilterSummary filter={record} />,
@@ -17,7 +17,6 @@ import history from 'lib/history';
 import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
 import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
 import { isEmpty } from 'lodash-es';
-import { useTimezone } from 'providers/Timezone';
 import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
 import { useDispatch, useSelector } from 'react-redux';
 import { useLocation } from 'react-router-dom';
@@ -27,7 +26,6 @@ import { SuccessResponse } from 'types/api';
 import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
 import { DataSource } from 'types/common/queryBuilder';
 import { GlobalReducer } from 'types/reducer/globalTime';
-import uPlot from 'uplot';
 import { getTimeRange } from 'utils/getTimeRange';

 import { Container } from './styles';
@@ -120,8 +118,6 @@ function TimeSeriesView({
     // eslint-disable-next-line react-hooks/exhaustive-deps
   }, []);

-  const { timezone } = useTimezone();
-
   const chartOptions = getUPlotChartOptions({
     onDragSelect,
     yAxisUnit: yAxisUnit || '',
@@ -135,9 +131,6 @@ function TimeSeriesView({
     maxTimeScale,
     softMax: null,
     softMin: null,
-    tzDate: (timestamp: number) =>
-      uPlot.tzDate(new Date(timestamp * 1e3), timezone?.value),
-    timezone: timezone?.value,
   });

   return (
@@ -28,7 +28,6 @@ import getTimeString from 'lib/getTimeString';
 import history from 'lib/history';
 import { isObject } from 'lodash-es';
 import { Check, Copy, Info, Send, Undo } from 'lucide-react';
-import { useTimezone } from 'providers/Timezone';
 import { useCallback, useEffect, useMemo, useState } from 'react';
 import { useQueryClient } from 'react-query';
 import { connect, useSelector } from 'react-redux';
@@ -614,8 +613,6 @@ function DateTimeSelection({
     );
   };

-  const { timezone } = useTimezone();
-
   return (
     <div className="date-time-selector">
       {showResetButton && selectedTime !== defaultRelativeTime && (
@@ -667,8 +664,8 @@ function DateTimeSelection({
               setIsValidteRelativeTime(isValid);
             }}
             selectedValue={getInputLabel(
-              dayjs(minTime / 1000000).tz(timezone.value),
-              dayjs(maxTime / 1000000).tz(timezone.value),
+              dayjs(minTime / 1000000),
+              dayjs(maxTime / 1000000),
               selectedTime,
             )}
             data-testid="dropDown"
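Note: several hunks above and below drop dayjs .tz(...) calls. A self-contained sketch of the wiring being removed, assuming the standard dayjs utc and timezone plugins (the zone name and epoch value are illustrative):

import dayjs from 'dayjs';
import timezonePlugin from 'dayjs/plugin/timezone';
import utc from 'dayjs/plugin/utc';

dayjs.extend(utc);
dayjs.extend(timezonePlugin);

// minTime arrives in epoch nanoseconds, so divide by 1e6 to get milliseconds,
// then shift the formatted label into the selected IANA zone.
const minTime = 1710441000000000000;
const label = dayjs(minTime / 1000000)
  .tz('America/New_York')
  .format('YYYY-MM-DD HH:mm:ss');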
@@ -24,7 +24,6 @@ import history from 'lib/history';
 import { map } from 'lodash-es';
 import { PanelRight } from 'lucide-react';
 import { SPAN_DETAILS_LEFT_COL_WIDTH } from 'pages/TraceDetail/constants';
-import { useTimezone } from 'providers/Timezone';
 import { useEffect, useMemo, useState } from 'react';
 import { ITraceForest, PayloadProps } from 'types/api/trace/getTraceItem';
 import { getSpanTreeMetadata } from 'utils/getSpanTreeMetadata';
@@ -140,8 +139,6 @@ function TraceDetail({ response }: TraceDetailProps): JSX.Element {

   const isDarkMode = useIsDarkMode();

-  const { timezone } = useTimezone();
-
   return (
     <StyledRow styledclass={[Flex({ flex: 1 })]}>
       <StyledCol flex="auto" styledclass={styles.leftContainer}>
@@ -198,9 +195,7 @@ function TraceDetail({ response }: TraceDetailProps): JSX.Element {
           {isGlobalTimeVisible && (
             <styles.TimeStampContainer flex={`${SPAN_DETAILS_LEFT_COL_WIDTH}px`}>
               <Typography>
-                {dayjs(traceMetaData.globalStart)
-                  .tz(timezone.value)
-                  .format('hh:mm:ss a (UTC Z) MM/DD')}
+                {dayjs(traceMetaData.globalStart).format('hh:mm:ss a MM/DD')}
               </Typography>
             </styles.TimeStampContainer>
           )}
@@ -15,7 +15,6 @@ import useDragColumns from 'hooks/useDragColumns';
 import { getDraggedColumns } from 'hooks/useDragColumns/utils';
 import useUrlQueryData from 'hooks/useUrlQueryData';
 import { RowData } from 'lib/query/createTableColumnsFromQuery';
-import { useTimezone } from 'providers/Timezone';
 import { memo, useCallback, useMemo } from 'react';
 import { useSelector } from 'react-redux';
 import { AppState } from 'store/reducers';
@@ -98,15 +97,10 @@ function ListView({ isFilterApplied }: ListViewProps): JSX.Element {
     queryTableDataResult,
   ]);

-  const { formatTimezoneAdjustedTimestamp } = useTimezone();
-
   const columns = useMemo(() => {
-    const updatedColumns = getListColumns(
-      options?.selectColumns || [],
-      formatTimezoneAdjustedTimestamp,
-    );
+    const updatedColumns = getListColumns(options?.selectColumns || []);
     return getDraggedColumns(updatedColumns, draggedColumns);
-  }, [options?.selectColumns, formatTimezoneAdjustedTimestamp, draggedColumns]);
+  }, [options?.selectColumns, draggedColumns]);

   const transformedQueryTableData = useMemo(
     () => transformDataWithDate(queryTableData) || [],
@@ -3,7 +3,7 @@ import { ColumnsType } from 'antd/es/table';
 import ROUTES from 'constants/routes';
 import { getMs } from 'container/Trace/Filters/Panel/PanelBody/Duration/util';
 import { formUrlParams } from 'container/TraceDetail/utils';
-import { TimestampInput } from 'hooks/useTimezoneFormatter/useTimezoneFormatter';
+import dayjs from 'dayjs';
 import { RowData } from 'lib/query/createTableColumnsFromQuery';
 import { Link } from 'react-router-dom';
 import { ILog } from 'types/api/logs/log';
@@ -40,10 +40,6 @@ export const getTraceLink = (record: RowData): string =>

 export const getListColumns = (
   selectedColumns: BaseAutocompleteData[],
-  formatTimezoneAdjustedTimestamp: (
-    input: TimestampInput,
-    format?: string,
-  ) => string | number,
 ): ColumnsType<RowData> => {
   const initialColumns: ColumnsType<RowData> = [
     {
@@ -54,8 +50,8 @@ export const getListColumns = (
       render: (value, item): JSX.Element => {
         const date =
           typeof value === 'string'
-            ? formatTimezoneAdjustedTimestamp(value, 'YYYY-MM-DD HH:mm:ss.SSS')
-            : formatTimezoneAdjustedTimestamp(value / 1e6, 'YYYY-MM-DD HH:mm:ss.SSS');
+            ? dayjs(value).format('YYYY-MM-DD HH:mm:ss.SSS')
+            : dayjs(value / 1e6).format('YYYY-MM-DD HH:mm:ss.SSS');
         return (
           <BlockLink to={getTraceLink(item)}>
             <Typography.Text>{date}</Typography.Text>
@@ -15,7 +15,6 @@ import { Pagination } from 'hooks/queryPagination';
 import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
 import history from 'lib/history';
 import { RowData } from 'lib/query/createTableColumnsFromQuery';
-import { useTimezone } from 'providers/Timezone';
 import {
   Dispatch,
   HTMLAttributes,
@@ -50,12 +49,7 @@ function TracesTableComponent({
     }));
   }, [pagination, setRequestData]);

-  const { formatTimezoneAdjustedTimestamp } = useTimezone();
-
-  const columns = getListColumns(
-    widget.selectedTracesFields || [],
-    formatTimezoneAdjustedTimestamp,
-  );
+  const columns = getListColumns(widget.selectedTracesFields || []);

   const dataLength =
     queryResponse.data?.payload?.data?.newResult?.data?.result[0]?.list?.length;
@@ -1,12 +1,12 @@
 import { Tag, Typography } from 'antd';
-import { useTimezone } from 'providers/Timezone';
+import convertDateToAmAndPm from 'lib/convertDateToAmAndPm';
+import getFormattedDate from 'lib/getFormatedDate';
 import { Alerts } from 'types/api/alerts/getTriggered';

 import Status from '../TableComponents/AlertStatus';
 import { TableCell, TableRow } from './styles';

 function ExapandableRow({ allAlerts }: ExapandableRowProps): JSX.Element {
-  const { formatTimezoneAdjustedTimestamp } = useTimezone();
   return (
     <>
       {allAlerts.map((alert) => {
@@ -40,9 +40,8 @@ function ExapandableRow({ allAlerts }: ExapandableRowProps): JSX.Element {
             </TableCell>

             <TableCell>
-              <Typography>{`${formatTimezoneAdjustedTimestamp(
+              <Typography>{`${getFormattedDate(formatedDate)} ${convertDateToAmAndPm(
                 formatedDate,
-                'MM/DD/YYYY hh:mm:ss A (UTC Z)',
               )}`}</Typography>
             </TableCell>
@@ -4,7 +4,8 @@ import { ColumnsType } from 'antd/lib/table';
 import { ResizeTable } from 'components/ResizeTable';
 import LabelColumn from 'components/TableRenderer/LabelColumn';
 import AlertStatus from 'container/TriggeredAlerts/TableComponents/AlertStatus';
-import { useTimezone } from 'providers/Timezone';
+import convertDateToAmAndPm from 'lib/convertDateToAmAndPm';
+import getFormattedDate from 'lib/getFormatedDate';
 import { Alerts } from 'types/api/alerts/getTriggered';

 import { Value } from './Filter';
@@ -15,7 +16,6 @@ function NoFilterTable({
   selectedFilter,
 }: NoFilterTableProps): JSX.Element {
   const filteredAlerts = FilterAlerts(allAlerts, selectedFilter);
-  const { formatTimezoneAdjustedTimestamp } = useTimezone();

   // need to add the filter
   const columns: ColumnsType<Alerts> = [
@@ -83,12 +83,15 @@ function NoFilterTable({
       width: 100,
       sorter: (a, b): number =>
         new Date(a.startsAt).getTime() - new Date(b.startsAt).getTime(),
-      render: (date): JSX.Element => (
-        <Typography>{`${formatTimezoneAdjustedTimestamp(
-          date,
-          'MM/DD/YYYY hh:mm:ss A (UTC Z)',
-        )}`}</Typography>
-      ),
+      render: (date): JSX.Element => {
+        const formatedDate = new Date(date);
+
+        return (
+          <Typography>{`${getFormattedDate(formatedDate)} ${convertDateToAmAndPm(
+            formatedDate,
+          )}`}</Typography>
+        );
+      },
     },
   ];
@@ -1,4 +1,4 @@
-import { useCallback, useEffect, useState } from 'react';
+import { useEffect, useRef, useState } from 'react';
 import { ErrorResponse, SuccessResponse } from 'types/api';

 function useFetch<PayloadProps, FunctionParams>(
@@ -10,7 +10,7 @@ function useFetch<PayloadProps, FunctionParams>(
     (arg0: any): Promise<SuccessResponse<PayloadProps> | ErrorResponse>;
   },
   param?: FunctionParams,
-): State<PayloadProps | undefined> & { refetch: () => Promise<void> } {
+): State<PayloadProps | undefined> {
   const [state, setStates] = useState<State<PayloadProps | undefined>>({
     loading: true,
     success: null,
@@ -19,28 +19,37 @@ function useFetch<PayloadProps, FunctionParams>(
     payload: undefined,
   });

-  const fetchData = useCallback(async (): Promise<void> => {
-    setStates((prev) => ({ ...prev, loading: true }));
-    try {
-      const response = await functions(param);
-
-      if (response.statusCode === 200) {
-        setStates({
-          loading: false,
-          error: false,
-          success: true,
-          payload: response.payload,
-          errorMessage: '',
-        });
-      } else {
-        setStates({
-          loading: false,
-          error: true,
-          success: false,
-          payload: undefined,
-          errorMessage: response.error as string,
-        });
-      }
+  const loadingRef = useRef(0);
+
+  useEffect(() => {
+    try {
+      (async (): Promise<void> => {
+        if (state.loading) {
+          const response = await functions(param);
+
+          if (loadingRef.current === 0) {
+            loadingRef.current = 1;
+
+            if (response.statusCode === 200) {
+              setStates({
+                loading: false,
+                error: false,
+                success: true,
+                payload: response.payload,
+                errorMessage: '',
+              });
+            } else {
+              setStates({
+                loading: false,
+                error: true,
+                success: false,
+                payload: undefined,
+                errorMessage: response.error as string,
+              });
+            }
+          }
+        }
+      })();
     } catch (error) {
       setStates({
         payload: undefined,
@@ -50,16 +59,13 @@ function useFetch<PayloadProps, FunctionParams>(
         errorMessage: error as string,
       });
     }
-  }, [functions, param]);
-
-  // Initial fetch
-  useEffect(() => {
-    fetchData();
-  }, [fetchData]);
+    return (): void => {
+      loadingRef.current = 1;
+    };
+  }, [functions, param, state.loading]);

   return {
     ...state,
-    refetch: fetchData,
   };
 }
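Note: the hunks above revert useFetch from a refetch-capable shape back to the effect-driven original. A minimal self-contained sketch of the removed shape, under assumed names (Fetcher and useRefetchableFetch are illustrative, not SigNoz APIs):

import { useCallback, useEffect, useState } from 'react';

type Fetcher<T> = () => Promise<T>;

function useRefetchableFetch<T>(
  fetcher: Fetcher<T>,
): { data: T | undefined; loading: boolean; refetch: () => Promise<void> } {
  const [data, setData] = useState<T | undefined>(undefined);
  const [loading, setLoading] = useState(true);

  // Memoised so it can serve both as the mount-time effect and as refetch().
  const fetchData = useCallback(async (): Promise<void> => {
    setLoading(true);
    setData(await fetcher());
    setLoading(false);
  }, [fetcher]);

  useEffect(() => {
    fetchData(); // initial fetch
  }, [fetchData]);

  return { data, loading, refetch: fetchData };
}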
@@ -1,103 +0,0 @@
-import { Timezone } from 'components/CustomTimePicker/timezoneUtils';
-import dayjs from 'dayjs';
-import timezone from 'dayjs/plugin/timezone';
-import utc from 'dayjs/plugin/utc';
-import { useCallback, useEffect, useMemo } from 'react';
-
-// Initialize dayjs plugins
-dayjs.extend(utc);
-dayjs.extend(timezone);
-
-// Types
-export type TimestampInput = string | number | Date;
-interface CacheEntry {
-  value: string;
-  timestamp: number;
-}
-
-//
-
-// Constants
-const CACHE_SIZE_LIMIT = 1000;
-const CACHE_CLEANUP_PERCENTAGE = 0.5; // Remove 50% when limit is reached
-
-function useTimezoneFormatter({
-  userTimezone,
-}: {
-  userTimezone: Timezone;
-}): {
-  formatTimezoneAdjustedTimestamp: (
-    input: TimestampInput,
-    format?: string,
-  ) => string;
-} {
-  // Initialize cache using useMemo to persist between renders
-  const cache = useMemo(() => new Map<string, CacheEntry>(), []);
-
-  // Clear cache when timezone changes
-  useEffect(() => {
-    cache.clear();
-  }, [cache, userTimezone]);
-
-  const clearCacheEntries = useCallback(() => {
-    if (cache.size <= CACHE_SIZE_LIMIT) return;
-
-    // Sort entries by timestamp (oldest first)
-    const sortedEntries = Array.from(cache.entries()).sort(
-      (a, b) => a[1].timestamp - b[1].timestamp,
-    );
-
-    // Calculate how many entries to remove (50% or overflow, whichever is larger)
-    const entriesToRemove = Math.max(
-      Math.floor(cache.size * CACHE_CLEANUP_PERCENTAGE),
-      cache.size - CACHE_SIZE_LIMIT,
-    );
-
-    // Remove oldest entries
-    sortedEntries.slice(0, entriesToRemove).forEach(([key]) => cache.delete(key));
-  }, [cache]);
-
-  /**
-   * Formats a timestamp with the user's timezone and caches the result
-   * @param {TimestampInput} input - The timestamp to format (string, number, or Date)
-   * @param {string} [format='YYYY-MM-DD HH:mm:ss'] - The desired output format
-   * @returns {string} The formatted timestamp string in the user's timezone
-   * @example
-   * // Input: UTC timestamp
-   * // User timezone: 'UTC - 4'
-   * // Returns: "2024-03-14 15:30:00"
-   * formatTimezoneAdjustedTimestamp('2024-03-14T19:30:00Z')
-   */
-  const formatTimezoneAdjustedTimestamp = useCallback(
-    (input: TimestampInput, format = 'YYYY-MM-DD HH:mm:ss'): string => {
-      const timestamp = dayjs(input).valueOf();
-      const cacheKey = `${timestamp}_${userTimezone?.value}`;
-
-      // Check cache first
-      const cachedValue = cache.get(cacheKey);
-      if (cachedValue) {
-        return cachedValue.value;
-      }
-      // Format timestamp
-      const formattedValue = dayjs(input).tz(userTimezone?.value).format(format);
-
-      // Update cache
-      cache.set(cacheKey, {
-        value: formattedValue,
-        timestamp: Date.now(),
-      });
-
-      // Clear expired entries and enforce size limit
-      if (cache.size > CACHE_SIZE_LIMIT) {
-        clearCacheEntries();
-      }
-
-      return formattedValue;
-    },
-    [cache, clearCacheEntries, userTimezone],
-  );

-  return { formatTimezoneAdjustedTimestamp };
-}
-
-export default useTimezoneFormatter;
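Note: for reference, this is roughly how the deleted hook was consumed, assuming a React component context and a Timezone whose value is an IANA zone name (the Berlin zone is an example, not a default):

// Inside a React component, with the hook still present:
const { formatTimezoneAdjustedTimestamp } = useTimezoneFormatter({
  userTimezone: { value: 'Europe/Berlin' } as Timezone,
});

// Falls back to 'YYYY-MM-DD HH:mm:ss' when no format is given; results are
// cached per timestamp-and-zone, capped at 1000 entries with the oldest half
// evicted on overflow, per the constants in the deleted file above.
const shown = formatTimezoneAdjustedTimestamp('2024-03-14T19:30:00Z');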
@@ -7,7 +7,6 @@ import { AxiosError } from 'axios';
 import { ThemeProvider } from 'hooks/useDarkMode';
 import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
 import posthog from 'posthog-js';
-import TimezoneProvider from 'providers/Timezone';
 import { createRoot } from 'react-dom/client';
 import { HelmetProvider } from 'react-helmet-async';
 import { QueryClient, QueryClientProvider } from 'react-query';
@@ -70,16 +69,14 @@ if (container) {
     <Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
       <HelmetProvider>
         <ThemeProvider>
-          <TimezoneProvider>
-            <QueryClientProvider client={queryClient}>
-              <Provider store={store}>
-                <AppRoutes />
-              </Provider>
-              {process.env.NODE_ENV === 'development' && (
-                <ReactQueryDevtools initialIsOpen={false} />
-              )}
-            </QueryClientProvider>
-          </TimezoneProvider>
+          <QueryClientProvider client={queryClient}>
+            <Provider store={store}>
+              <AppRoutes />
+            </Provider>
+            {process.env.NODE_ENV === 'development' && (
+              <ReactQueryDevtools initialIsOpen={false} />
+            )}
+          </QueryClientProvider>
         </ThemeProvider>
       </HelmetProvider>
     </Sentry.ErrorBoundary>,
@@ -55,8 +55,6 @@ export interface GetUPlotChartOptions {
 	>;
 	customTooltipElement?: HTMLDivElement;
 	verticalLineTimestamp?: number;
-	tzDate?: (timestamp: number) => Date;
-	timezone?: string;
 }
 
 /** the function converts series A , series B , series C to
@@ -160,8 +158,6 @@ export const getUPlotChartOptions = ({
 	setHiddenGraph,
 	customTooltipElement,
 	verticalLineTimestamp,
-	tzDate,
-	timezone,
 }: GetUPlotChartOptions): uPlot.Options => {
 	const timeScaleProps = getXAxisScale(minTimeScale, maxTimeScale);
 
@@ -200,7 +196,6 @@ export const getUPlotChartOptions = ({
 				fill: (): string => '#fff',
 			},
 		},
-		tzDate,
 		padding: [16, 16, 8, 8],
 		bands,
 		scales: {
@@ -227,7 +222,6 @@ export const getUPlotChartOptions = ({
 			stackBarChart,
 			isDarkMode,
 			customTooltipElement,
-			timezone,
 		}),
 		onClickPlugin({
 			onClick: onClickHandler,
@@ -46,7 +46,6 @@ const generateTooltipContent = (
 	isHistogramGraphs?: boolean,
 	isMergedSeries?: boolean,
 	stackBarChart?: boolean,
-	timezone?: string,
 	// eslint-disable-next-line sonarjs/cognitive-complexity
 ): HTMLElement => {
 	const container = document.createElement('div');
@@ -70,13 +69,9 @@ const generateTooltipContent = (
 		series.forEach((item, index) => {
 			if (index === 0) {
 				if (isBillingUsageGraphs) {
-					tooltipTitle = dayjs(data[0][idx] * 1000)
-						.tz(timezone)
-						.format('MMM DD YYYY');
+					tooltipTitle = dayjs(data[0][idx] * 1000).format('MMM DD YYYY');
 				} else {
-					tooltipTitle = dayjs(data[0][idx] * 1000)
-						.tz(timezone)
-						.format('MMM DD YYYY h:mm:ss A');
+					tooltipTitle = dayjs(data[0][idx] * 1000).format('MMM DD YYYY HH:mm:ss');
 				}
 			} else if (item.show) {
 				const {
@@ -228,7 +223,6 @@ type ToolTipPluginProps = {
 	stackBarChart?: boolean;
 	isDarkMode: boolean;
 	customTooltipElement?: HTMLDivElement;
-	timezone?: string;
 };
 
 const tooltipPlugin = ({
@@ -240,7 +234,6 @@ const tooltipPlugin = ({
 	stackBarChart,
 	isDarkMode,
 	customTooltipElement,
-	timezone,
 }: // eslint-disable-next-line sonarjs/cognitive-complexity
 ToolTipPluginProps): any => {
 	let over: HTMLElement;
@@ -307,7 +300,6 @@ ToolTipPluginProps): any => {
 				isHistogramGraphs,
 				isMergedSeries,
 				stackBarChart,
-				timezone,
 			);
 			if (customTooltipElement) {
 				content.appendChild(customTooltipElement);
@@ -31,7 +31,6 @@ import {
 	Trash2,
 	X,
 } from 'lucide-react';
-import { useTimezone } from 'providers/Timezone';
 import { ChangeEvent, useEffect, useRef, useState } from 'react';
 import { useTranslation } from 'react-i18next';
 import { useSelector } from 'react-redux';
@@ -208,8 +207,6 @@ function SaveView(): JSX.Element {
 		}
 	};
 
-	const { formatTimezoneAdjustedTimestamp } = useTimezone();
-
 	const columns: TableProps<ViewProps>['columns'] = [
 		{
 			title: 'Save View',
@@ -221,10 +218,31 @@ function SaveView(): JSX.Element {
 					bgColor = extraData.color;
 				}
 
-				const formattedDateAndTime = formatTimezoneAdjustedTimestamp(
-					view.createdAt,
-					'HH:mm:ss ⎯ MMM D, YYYY (UTC Z)',
-				);
+				const timeOptions: Intl.DateTimeFormatOptions = {
+					hour: '2-digit',
+					minute: '2-digit',
+					second: '2-digit',
+					hour12: false,
+				};
+				const formattedTime = new Date(view.createdAt).toLocaleTimeString(
+					'en-US',
+					timeOptions,
+				);
+
+				const dateOptions: Intl.DateTimeFormatOptions = {
+					month: 'short',
+					day: 'numeric',
+					year: 'numeric',
+				};
+
+				const formattedDate = new Date(view.createdAt).toLocaleDateString(
+					'en-US',
+					dateOptions,
+				);
+
+				// Combine time and date
+				const formattedDateAndTime = `${formattedTime} ⎯ ${formattedDate}`;
+
 				const isEditDeleteSupported = allowedRoles.includes(role as string);
 
 				return (
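SaveView now renders createdAt with the browser-local toLocaleTimeString/toLocaleDateString pair instead of the timezone-aware formatter. For readers more at home in the backend half of this patch, the combined "HH:mm:ss ⎯ MMM D, YYYY" shape maps onto Go reference layouts roughly as follows; this is a sketch, not repository code.

package main

import (
	"fmt"
	"time"
)

// formatCreatedAt mirrors the "15:04:05 ⎯ Mar 14, 2024" shape the view
// builds from toLocaleTimeString and toLocaleDateString.
func formatCreatedAt(createdAt time.Time) string {
	formattedTime := createdAt.Format("15:04:05")    // 24-hour clock
	formattedDate := createdAt.Format("Jan 2, 2006") // short month, day, year
	return fmt.Sprintf("%s ⎯ %s", formattedTime, formattedDate)
}

func main() {
	t := time.Date(2024, 3, 14, 19, 30, 0, 0, time.UTC)
	fmt.Println(formatCreatedAt(t)) // 19:30:00 ⎯ Mar 14, 2024
}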
@@ -1,98 +0,0 @@
-import {
-	getBrowserTimezone,
-	getTimezoneObjectByTimezoneString,
-	Timezone,
-} from 'components/CustomTimePicker/timezoneUtils';
-import { LOCALSTORAGE } from 'constants/localStorage';
-import useTimezoneFormatter, {
-	TimestampInput,
-} from 'hooks/useTimezoneFormatter/useTimezoneFormatter';
-import React, {
-	createContext,
-	useCallback,
-	useContext,
-	useMemo,
-	useState,
-} from 'react';
-
-interface TimezoneContextType {
-	timezone: Timezone;
-	browserTimezone: Timezone;
-	updateTimezone: (timezone: Timezone) => void;
-	formatTimezoneAdjustedTimestamp: (
-		input: TimestampInput,
-		format?: string,
-	) => string;
-}
-
-const TimezoneContext = createContext<TimezoneContextType | undefined>(
-	undefined,
-);
-
-function TimezoneProvider({
-	children,
-}: {
-	children: React.ReactNode;
-}): JSX.Element {
-	const getStoredTimezoneValue = (): Timezone | null => {
-		try {
-			const timezoneValue = localStorage.getItem(LOCALSTORAGE.PREFERRED_TIMEZONE);
-			if (timezoneValue) {
-				return getTimezoneObjectByTimezoneString(timezoneValue);
-			}
-		} catch (error) {
-			console.error('Error reading timezone from localStorage:', error);
-		}
-		return null;
-	};
-
-	const setStoredTimezoneValue = (value: string): void => {
-		try {
-			localStorage.setItem(LOCALSTORAGE.PREFERRED_TIMEZONE, value);
-		} catch (error) {
-			console.error('Error saving timezone to localStorage:', error);
-		}
-	};
-
-	const browserTimezone = useMemo(() => getBrowserTimezone(), []);
-
-	const [timezone, setTimezone] = useState<Timezone>(
-		getStoredTimezoneValue() ?? browserTimezone,
-	);
-
-	const updateTimezone = useCallback((timezone: Timezone): void => {
-		if (!timezone.value) return;
-
-		// TODO(shaheer): replace this with user preferences API
-		setStoredTimezoneValue(timezone.value);
-		setTimezone(timezone);
-	}, []);
-
-	const { formatTimezoneAdjustedTimestamp } = useTimezoneFormatter({
-		userTimezone: timezone,
-	});
-
-	const value = React.useMemo(
-		() => ({
-			timezone,
-			browserTimezone,
-			updateTimezone,
-			formatTimezoneAdjustedTimestamp,
-		}),
-		[timezone, browserTimezone, updateTimezone, formatTimezoneAdjustedTimestamp],
-	);
-
-	return (
-		<TimezoneContext.Provider value={value}>{children}</TimezoneContext.Provider>
-	);
-}
-
-export const useTimezone = (): TimezoneContextType => {
-	const context = useContext(TimezoneContext);
-	if (context === undefined) {
-		throw new Error('useTimezone must be used within a TimezoneProvider');
-	}
-	return context;
-};
-
-export default TimezoneProvider;
@@ -8,21 +8,17 @@ export interface LimitProps {
 	config?: {
 		day?: {
 			size?: number;
-			enabled?: boolean;
 		};
 		second?: {
 			size?: number;
-			enabled?: boolean;
 		};
 	};
 	metric?: {
 		day?: {
 			size?: number;
-			enabled?: boolean;
 		};
 		second?: {
 			size?: number;
-			enabled?: boolean;
 		};
 	};
 }
@@ -31,13 +27,11 @@ export interface AddLimitProps {
 	keyID: string;
 	signal: string;
 	config: {
-		day?: {
-			size?: number;
-			enabled?: boolean;
+		day: {
+			size: number;
 		};
-		second?: {
-			size?: number;
-			enabled?: boolean;
+		second: {
+			size: number;
 		};
 	};
 }
@@ -46,13 +40,11 @@ export interface UpdateLimitProps {
 	limitID: string;
 	signal: string;
 	config: {
-		day?: {
-			size?: number;
-			enabled?: boolean;
+		day: {
+			size: number;
 		};
-		second?: {
-			size?: number;
-			enabled?: boolean;
+		second: {
+			size: number;
 		};
 	};
}
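The interface change tightens the write paths: LimitProps (the read shape) keeps day, second, and size optional, while AddLimitProps and UpdateLimitProps now require them and drop enabled everywhere. In Go terms this is the difference between pointer fields and value fields, sketched below with illustrative struct names that are not part of the codebase.

package main

import (
	"encoding/json"
	"fmt"
)

type SizeLimit struct {
	Size int `json:"size"`
}

// ReadLimit mirrors LimitProps: every window is optional, so pointers.
type ReadLimit struct {
	Day    *SizeLimit `json:"day,omitempty"`
	Second *SizeLimit `json:"second,omitempty"`
}

// WriteLimit mirrors AddLimitProps/UpdateLimitProps after the change:
// both windows and their sizes must be supplied, so plain values.
type WriteLimit struct {
	Day    SizeLimit `json:"day"`
	Second SizeLimit `json:"second"`
}

func main() {
	w := WriteLimit{Day: SizeLimit{Size: 100}, Second: SizeLimit{Size: 10}}
	b, _ := json.Marshal(w)
	fmt.Println(string(b)) // {"day":{"size":100},"second":{"size":10}}
}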
@@ -4764,11 +4764,6 @@
   d3-time-format "4.1.0"
   internmap "2.0.3"
 
-"@vvo/tzdb@6.149.0":
-  version "6.149.0"
-  resolved "https://registry.yarnpkg.com/@vvo/tzdb/-/tzdb-6.149.0.tgz#e4fcca3c49b90d5910a8679267540cb532809075"
-  integrity sha512-d68+oW1TE60Ho9FlCDO5Ks4suk6hp5umjNIrtWytVB0B/X0/P1T9yWdnH7EhNb2fx1CQE+MM1qmLUGzT+QAqdw==
-
 "@webassemblyjs/ast@1.12.1":
   version "1.12.1"
   resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.12.1.tgz#bb16a0e8b1914f979f45864c23819cc3e3f0d4bb"
@@ -766,6 +766,307 @@ func buildFilterArrayQuery(_ context.Context, excludeMap map[string]struct{}, pa
 	return args
 }
 
+func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *model.SpanFilterParams) (*model.SpanFiltersResponse, *model.ApiError) {
+
+	var query string
+	excludeMap := make(map[string]struct{})
+	for _, e := range queryParams.Exclude {
+		if e == constants.OperationRequest {
+			excludeMap[constants.OperationDB] = struct{}{}
+			continue
+		}
+		excludeMap[e] = struct{}{}
+	}
+
+	args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))}
+	if len(queryParams.TraceID) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.TraceID, constants.TraceID, &query, args)
+	}
+	if len(queryParams.ServiceName) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.ServiceName, constants.ServiceName, &query, args)
+	}
+	if len(queryParams.HttpRoute) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpRoute, constants.HttpRoute, &query, args)
+	}
+	if len(queryParams.HttpHost) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpHost, constants.HttpHost, &query, args)
+	}
+	if len(queryParams.HttpMethod) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpMethod, constants.HttpMethod, &query, args)
+	}
+	if len(queryParams.HttpUrl) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpUrl, constants.HttpUrl, &query, args)
+	}
+	if len(queryParams.Operation) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.Operation, constants.OperationDB, &query, args)
+	}
+	if len(queryParams.RPCMethod) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.RPCMethod, constants.RPCMethod, &query, args)
+	}
+	if len(queryParams.ResponseStatusCode) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.ResponseStatusCode, constants.ResponseStatusCode, &query, args)
+	}
+
+	if len(queryParams.MinDuration) != 0 {
+		query = query + " AND durationNano >= @durationNanoMin"
+		args = append(args, clickhouse.Named("durationNanoMin", queryParams.MinDuration))
+	}
+	if len(queryParams.MaxDuration) != 0 {
+		query = query + " AND durationNano <= @durationNanoMax"
+		args = append(args, clickhouse.Named("durationNanoMax", queryParams.MaxDuration))
+	}
+
+	if len(queryParams.SpanKind) != 0 {
+		query = query + " AND kind = @kind"
+		args = append(args, clickhouse.Named("kind", queryParams.SpanKind))
+	}
+
+	query = getStatusFilters(query, queryParams.Status, excludeMap)
+
+	traceFilterReponse := model.SpanFiltersResponse{
+		Status: map[string]uint64{},
+		Duration: map[string]uint64{},
+		ServiceName: map[string]uint64{},
+		Operation: map[string]uint64{},
+		ResponseStatusCode: map[string]uint64{},
+		RPCMethod: map[string]uint64{},
+		HttpMethod: map[string]uint64{},
+		HttpUrl: map[string]uint64{},
+		HttpRoute: map[string]uint64{},
+		HttpHost: map[string]uint64{},
+	}
+
+	for _, e := range queryParams.GetFilters {
+		switch e {
+		case constants.TraceID:
+			continue
+		case constants.ServiceName:
+			finalQuery := fmt.Sprintf("SELECT serviceName, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
+			finalQuery += query
+			finalQuery += " GROUP BY serviceName"
+			var dBResponse []model.DBResponseServiceName
+			err := r.db.Select(ctx, &dBResponse, finalQuery, args...)
+			zap.L().Info(finalQuery)
+
+			if err != nil {
+				zap.L().Error("Error in processing sql query", zap.Error(err))
+				return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
+			}
+			for _, service := range dBResponse {
+				if service.ServiceName != "" {
+					traceFilterReponse.ServiceName[service.ServiceName] = service.Count
+				}
+			}
+		case constants.HttpRoute:
+			finalQuery := fmt.Sprintf("SELECT httpRoute, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
+			finalQuery += query
+			finalQuery += " GROUP BY httpRoute"
+			var dBResponse []model.DBResponseHttpRoute
+			err := r.db.Select(ctx, &dBResponse, finalQuery, args...)
+			zap.L().Info(finalQuery)
+
+			if err != nil {
+				zap.L().Error("Error in processing sql query", zap.Error(err))
+				return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
+			}
+			for _, service := range dBResponse {
+				if service.HttpRoute != "" {
+					traceFilterReponse.HttpRoute[service.HttpRoute] = service.Count
+				}
+			}
+		case constants.HttpUrl:
+			finalQuery := fmt.Sprintf("SELECT httpUrl, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
+			finalQuery += query
+			finalQuery += " GROUP BY httpUrl"
+			var dBResponse []model.DBResponseHttpUrl
+			err := r.db.Select(ctx, &dBResponse, finalQuery, args...)
+			zap.L().Info(finalQuery)
+
+			if err != nil {
+				zap.L().Error("Error in processing sql query", zap.Error(err))
+				return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
+			}
+			for _, service := range dBResponse {
+				if service.HttpUrl != "" {
+					traceFilterReponse.HttpUrl[service.HttpUrl] = service.Count
+				}
+			}
+		case constants.HttpMethod:
+			finalQuery := fmt.Sprintf("SELECT httpMethod, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
+			finalQuery += query
+			finalQuery += " GROUP BY httpMethod"
+			var dBResponse []model.DBResponseHttpMethod
+			err := r.db.Select(ctx, &dBResponse, finalQuery, args...)
+			zap.L().Info(finalQuery)
+
+			if err != nil {
+				zap.L().Error("Error in processing sql query", zap.Error(err))
+				return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
+			}
+			for _, service := range dBResponse {
+				if service.HttpMethod != "" {
+					traceFilterReponse.HttpMethod[service.HttpMethod] = service.Count
+				}
+			}
+		case constants.HttpHost:
+			finalQuery := fmt.Sprintf("SELECT httpHost, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
+			finalQuery += query
+			finalQuery += " GROUP BY httpHost"
+			var dBResponse []model.DBResponseHttpHost
+			err := r.db.Select(ctx, &dBResponse, finalQuery, args...)
+			zap.L().Info(finalQuery)
+
+			if err != nil {
+				zap.L().Error("Error in processing sql query", zap.Error(err))
+				return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
+			}
+			for _, service := range dBResponse {
+				if service.HttpHost != "" {
+					traceFilterReponse.HttpHost[service.HttpHost] = service.Count
+				}
+			}
+		case constants.OperationRequest:
+			finalQuery := fmt.Sprintf("SELECT name, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
+			finalQuery += query
+			finalQuery += " GROUP BY name"
+			var dBResponse []model.DBResponseOperation
+			err := r.db.Select(ctx, &dBResponse, finalQuery, args...)
+			zap.L().Info(finalQuery)
+
+			if err != nil {
+				zap.L().Error("Error in processing sql query", zap.Error(err))
+				return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
+			}
+			for _, service := range dBResponse {
+				if service.Operation != "" {
+					traceFilterReponse.Operation[service.Operation] = service.Count
+				}
+			}
+		case constants.Status:
+			finalQuery := fmt.Sprintf("SELECT COUNT(*) as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU AND hasError = true", r.TraceDB, r.indexTable)
+			finalQuery += query
+			var dBResponse []model.DBResponseTotal
+			err := r.db.Select(ctx, &dBResponse, finalQuery, args...)
+			zap.L().Info(finalQuery)
+
+			if err != nil {
+				zap.L().Error("Error in processing sql query", zap.Error(err))
+				return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
+			}
+
+			finalQuery2 := fmt.Sprintf("SELECT COUNT(*) as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU AND hasError = false", r.TraceDB, r.indexTable)
+			finalQuery2 += query
+			var dBResponse2 []model.DBResponseTotal
+			err = r.db.Select(ctx, &dBResponse2, finalQuery2, args...)
+			zap.L().Info(finalQuery2)
+
+			if err != nil {
+				zap.L().Error("Error in processing sql query", zap.Error(err))
+				return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
+			}
+			if len(dBResponse) > 0 && len(dBResponse2) > 0 {
+				traceFilterReponse.Status = map[string]uint64{"ok": dBResponse2[0].NumTotal, "error": dBResponse[0].NumTotal}
+			} else if len(dBResponse) > 0 {
+				traceFilterReponse.Status = map[string]uint64{"ok": 0, "error": dBResponse[0].NumTotal}
+			} else if len(dBResponse2) > 0 {
+				traceFilterReponse.Status = map[string]uint64{"ok": dBResponse2[0].NumTotal, "error": 0}
+			} else {
+				traceFilterReponse.Status = map[string]uint64{"ok": 0, "error": 0}
+			}
+		case constants.Duration:
+			err := r.featureFlags.CheckFeature(constants.DurationSort)
+			durationSortEnabled := err == nil
+			finalQuery := ""
+			if !durationSortEnabled {
+				// if duration sort is not enabled, we need to get the min and max duration from the index table
+				finalQuery = fmt.Sprintf("SELECT min(durationNano) as min, max(durationNano) as max FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
+				finalQuery += query
+				var dBResponse []model.DBResponseMinMax
+				err = r.db.Select(ctx, &dBResponse, finalQuery, args...)
+				zap.L().Info(finalQuery)
+				if err != nil {
+					zap.L().Error("Error in processing sql query", zap.Error(err))
+					return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
+				}
+				if len(dBResponse) > 0 {
+					traceFilterReponse.Duration = map[string]uint64{"minDuration": dBResponse[0].Min, "maxDuration": dBResponse[0].Max}
+				}
+			} else {
+				// when duration sort is enabled, we need to get the min and max duration from the duration table
+				finalQuery = fmt.Sprintf("SELECT durationNano as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.durationTable)
+				finalQuery += query
+				finalQuery += " ORDER BY durationNano LIMIT 1"
+				var dBResponse []model.DBResponseTotal
+				err = r.db.Select(ctx, &dBResponse, finalQuery, args...)
+				zap.L().Info(finalQuery)
+
+				if err != nil {
+					zap.L().Error("Error in processing sql query", zap.Error(err))
+					return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
+				}
+
+				finalQuery = fmt.Sprintf("SELECT durationNano as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.durationTable)
+				finalQuery += query
+				finalQuery += " ORDER BY durationNano DESC LIMIT 1"
+				var dBResponse2 []model.DBResponseTotal
+				err = r.db.Select(ctx, &dBResponse2, finalQuery, args...)
+				zap.L().Info(finalQuery)
+
+				if err != nil {
+					zap.L().Error("Error in processing sql query", zap.Error(err))
+					return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
+				}
+				if len(dBResponse) > 0 {
+					traceFilterReponse.Duration["minDuration"] = dBResponse[0].NumTotal
+				}
+				if len(dBResponse2) > 0 {
+					traceFilterReponse.Duration["maxDuration"] = dBResponse2[0].NumTotal
+				}
+			}
+		case constants.RPCMethod:
+			finalQuery := fmt.Sprintf("SELECT rpcMethod, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
+			finalQuery += query
+			finalQuery += " GROUP BY rpcMethod"
+			var dBResponse []model.DBResponseRPCMethod
+			err := r.db.Select(ctx, &dBResponse, finalQuery, args...)
+			zap.L().Info(finalQuery)
+
+			if err != nil {
+				zap.L().Error("Error in processing sql query", zap.Error(err))
+				return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
+			}
+			for _, service := range dBResponse {
+				if service.RPCMethod != "" {
+					traceFilterReponse.RPCMethod[service.RPCMethod] = service.Count
+				}
+			}
+
+		case constants.ResponseStatusCode:
+			finalQuery := fmt.Sprintf("SELECT responseStatusCode, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
+			finalQuery += query
+			finalQuery += " GROUP BY responseStatusCode"
+			var dBResponse []model.DBResponseStatusCodeMethod
+			err := r.db.Select(ctx, &dBResponse, finalQuery, args...)
+			zap.L().Info(finalQuery)
+
+			if err != nil {
+				zap.L().Error("Error in processing sql query", zap.Error(err))
+				return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
+			}
+			for _, service := range dBResponse {
+				if service.ResponseStatusCode != "" {
+					traceFilterReponse.ResponseStatusCode[service.ResponseStatusCode] = service.Count
+				}
+			}
+
+		default:
+			return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("filter type: %s not supported", e)}
+		}
+	}
+
+	return &traceFilterReponse, nil
+}
+
 func getStatusFilters(query string, statusParams []string, excludeMap map[string]struct{}) string {
 
 	// status can only be two and if both are selected than they are equivalent to none selected
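The new reader methods above all follow one pattern: each optional filter appends an "AND ..." predicate to the query string and a matching clickhouse.Named binding to the args slice, so the SQL and its parameters grow in lockstep. A minimal, self-contained sketch of that pattern follows; the spanFilter struct is invented for illustration, while clickhouse.Named is the real clickhouse-go v2 helper used throughout the patch.

package main

import (
	"fmt"

	"github.com/ClickHouse/clickhouse-go/v2"
)

// spanFilter is an invented stand-in for model.SpanFilterParams.
type spanFilter struct {
	MinDuration string
	MaxDuration string
	Kind        string
}

// buildWhere grows the WHERE clause and the named bindings together,
// the same way GetSpanFilters does for duration and kind.
func buildWhere(f spanFilter) (string, []interface{}) {
	query := ""
	args := []interface{}{}
	if len(f.MinDuration) != 0 {
		query += " AND durationNano >= @durationNanoMin"
		args = append(args, clickhouse.Named("durationNanoMin", f.MinDuration))
	}
	if len(f.MaxDuration) != 0 {
		query += " AND durationNano <= @durationNanoMax"
		args = append(args, clickhouse.Named("durationNanoMax", f.MaxDuration))
	}
	if len(f.Kind) != 0 {
		query += " AND kind = @kind"
		args = append(args, clickhouse.Named("kind", f.Kind))
	}
	return query, args
}

func main() {
	q, args := buildWhere(spanFilter{MinDuration: "1000000", Kind: "2"})
	// Prints the clause fragment and the number of bindings that travel with it.
	fmt.Println(q, len(args))
}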
@@ -787,6 +1088,140 @@ func getStatusFilters(query string, statusParams []string, excludeMap map[string
 	return query
 }
 
+func (r *ClickHouseReader) GetFilteredSpans(ctx context.Context, queryParams *model.GetFilteredSpansParams) (*model.GetFilterSpansResponse, *model.ApiError) {
+
+	queryTable := fmt.Sprintf("%s.%s", r.TraceDB, r.indexTable)
+
+	excludeMap := make(map[string]struct{})
+	for _, e := range queryParams.Exclude {
+		if e == constants.OperationRequest {
+			excludeMap[constants.OperationDB] = struct{}{}
+			continue
+		}
+		excludeMap[e] = struct{}{}
+	}
+
+	var query string
+	args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))}
+	if len(queryParams.TraceID) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.TraceID, constants.TraceID, &query, args)
+	}
+	if len(queryParams.ServiceName) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.ServiceName, constants.ServiceName, &query, args)
+	}
+	if len(queryParams.HttpRoute) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpRoute, constants.HttpRoute, &query, args)
+	}
+	if len(queryParams.HttpHost) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpHost, constants.HttpHost, &query, args)
+	}
+	if len(queryParams.HttpMethod) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpMethod, constants.HttpMethod, &query, args)
+	}
+	if len(queryParams.HttpUrl) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpUrl, constants.HttpUrl, &query, args)
+	}
+	if len(queryParams.Operation) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.Operation, constants.OperationDB, &query, args)
+	}
+	if len(queryParams.RPCMethod) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.RPCMethod, constants.RPCMethod, &query, args)
+	}
+
+	if len(queryParams.ResponseStatusCode) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.ResponseStatusCode, constants.ResponseStatusCode, &query, args)
+	}
+
+	if len(queryParams.MinDuration) != 0 {
+		query = query + " AND durationNano >= @durationNanoMin"
+		args = append(args, clickhouse.Named("durationNanoMin", queryParams.MinDuration))
+	}
+	if len(queryParams.MaxDuration) != 0 {
+		query = query + " AND durationNano <= @durationNanoMax"
+		args = append(args, clickhouse.Named("durationNanoMax", queryParams.MaxDuration))
+	}
+	query = getStatusFilters(query, queryParams.Status, excludeMap)
+
+	if len(queryParams.SpanKind) != 0 {
+		query = query + " AND kind = @kind"
+		args = append(args, clickhouse.Named("kind", queryParams.SpanKind))
+	}
+
+	// create TagQuery from TagQueryParams
+	tags := createTagQueryFromTagQueryParams(queryParams.Tags)
+	subQuery, argsSubQuery, errStatus := buildQueryWithTagParams(ctx, tags)
+	query += subQuery
+	args = append(args, argsSubQuery...)
+	if errStatus != nil {
+		return nil, errStatus
+	}
+
+	if len(queryParams.OrderParam) != 0 {
+		if queryParams.OrderParam == constants.Duration {
+			queryTable = fmt.Sprintf("%s.%s", r.TraceDB, r.durationTable)
+			if queryParams.Order == constants.Descending {
+				query = query + " ORDER BY durationNano DESC"
+			}
+			if queryParams.Order == constants.Ascending {
+				query = query + " ORDER BY durationNano ASC"
+			}
+		} else if queryParams.OrderParam == constants.Timestamp {
+			projectionOptQuery := "SET allow_experimental_projection_optimization = 1"
+			err := r.db.Exec(ctx, projectionOptQuery)
+
+			zap.L().Info(projectionOptQuery)
+
+			if err != nil {
+				zap.L().Error("Error in processing sql query", zap.Error(err))
+				return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query")}
+			}
+			if queryParams.Order == constants.Descending {
+				query = query + " ORDER BY timestamp DESC"
+			}
+			if queryParams.Order == constants.Ascending {
+				query = query + " ORDER BY timestamp ASC"
+			}
+		}
+	}
+	if queryParams.Limit > 0 {
+		query = query + " LIMIT @limit"
+		args = append(args, clickhouse.Named("limit", queryParams.Limit))
+	}
+
+	if queryParams.Offset > 0 {
+		query = query + " OFFSET @offset"
+		args = append(args, clickhouse.Named("offset", queryParams.Offset))
+	}
+
+	var getFilterSpansResponseItems []model.GetFilterSpansResponseItem
+
+	baseQuery := fmt.Sprintf("SELECT timestamp, spanID, traceID, serviceName, name, durationNano, httpMethod, rpcMethod, responseStatusCode FROM %s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryTable)
+	baseQuery += query
+	err := r.db.Select(ctx, &getFilterSpansResponseItems, baseQuery, args...)
+	// Fill status and method
+	for i, e := range getFilterSpansResponseItems {
+		if e.RPCMethod != "" {
+			getFilterSpansResponseItems[i].Method = e.RPCMethod
+		} else {
+			getFilterSpansResponseItems[i].Method = e.HttpMethod
+		}
+	}
+
+	zap.L().Info(baseQuery)
+
+	if err != nil {
+		zap.L().Error("Error in processing sql query", zap.Error(err))
+		return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query")}
+	}
+
+	getFilterSpansResponse := model.GetFilterSpansResponse{
+		Spans: getFilterSpansResponseItems,
+		TotalSpans: 1000,
+	}
+
+	return &getFilterSpansResponse, nil
+}
+
 func createTagQueryFromTagQueryParams(queryParams []model.TagQueryParam) []model.TagQuery {
 	tags := []model.TagQuery{}
 	for _, tag := range queryParams {
@@ -944,6 +1379,87 @@ func addExistsOperator(item model.TagQuery, tagMapType string, not bool) (string
 	return fmt.Sprintf(" AND %s (%s)", notStr, strings.Join(tagOperatorPair, " OR ")), args
 }
 
+func (r *ClickHouseReader) GetTagFilters(ctx context.Context, queryParams *model.TagFilterParams) (*model.TagFilters, *model.ApiError) {
+
+	excludeMap := make(map[string]struct{})
+	for _, e := range queryParams.Exclude {
+		if e == constants.OperationRequest {
+			excludeMap[constants.OperationDB] = struct{}{}
+			continue
+		}
+		excludeMap[e] = struct{}{}
+	}
+
+	var query string
+	args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))}
+	if len(queryParams.TraceID) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.TraceID, constants.TraceID, &query, args)
+	}
+	if len(queryParams.ServiceName) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.ServiceName, constants.ServiceName, &query, args)
+	}
+	if len(queryParams.HttpRoute) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpRoute, constants.HttpRoute, &query, args)
+	}
+	if len(queryParams.HttpHost) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpHost, constants.HttpHost, &query, args)
+	}
+	if len(queryParams.HttpMethod) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpMethod, constants.HttpMethod, &query, args)
+	}
+	if len(queryParams.HttpUrl) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpUrl, constants.HttpUrl, &query, args)
+	}
+	if len(queryParams.Operation) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.Operation, constants.OperationDB, &query, args)
+	}
+	if len(queryParams.RPCMethod) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.RPCMethod, constants.RPCMethod, &query, args)
+	}
+	if len(queryParams.ResponseStatusCode) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.ResponseStatusCode, constants.ResponseStatusCode, &query, args)
+	}
+	if len(queryParams.MinDuration) != 0 {
+		query = query + " AND durationNano >= @durationNanoMin"
+		args = append(args, clickhouse.Named("durationNanoMin", queryParams.MinDuration))
+	}
+	if len(queryParams.MaxDuration) != 0 {
+		query = query + " AND durationNano <= @durationNanoMax"
+		args = append(args, clickhouse.Named("durationNanoMax", queryParams.MaxDuration))
+	}
+	if len(queryParams.SpanKind) != 0 {
+		query = query + " AND kind = @kind"
+		args = append(args, clickhouse.Named("kind", queryParams.SpanKind))
+	}
+
+	query = getStatusFilters(query, queryParams.Status, excludeMap)
+
+	tagFilters := []model.TagFilters{}
+
+	// Alternative finalQuery := fmt.Sprintf(`SELECT DISTINCT arrayJoin(tagMap.keys) as tagKeys FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU`, r.TraceDB, r.indexTable)
+	finalQuery := fmt.Sprintf(`SELECT groupUniqArrayArray(mapKeys(stringTagMap)) as stringTagKeys, groupUniqArrayArray(mapKeys(numberTagMap)) as numberTagKeys, groupUniqArrayArray(mapKeys(boolTagMap)) as boolTagKeys FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU`, r.TraceDB, r.indexTable)
+	finalQuery += query
+	err := r.db.Select(ctx, &tagFilters, finalQuery, args...)
+
+	zap.L().Info(query)
+
+	if err != nil {
+		zap.L().Error("Error in processing sql query", zap.Error(err))
+		return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query")}
+	}
+	tagFiltersResult := model.TagFilters{
+		StringTagKeys: make([]string, 0),
+		NumberTagKeys: make([]string, 0),
+		BoolTagKeys: make([]string, 0),
+	}
+	if len(tagFilters) != 0 {
+		tagFiltersResult.StringTagKeys = excludeTags(ctx, tagFilters[0].StringTagKeys)
+		tagFiltersResult.NumberTagKeys = excludeTags(ctx, tagFilters[0].NumberTagKeys)
+		tagFiltersResult.BoolTagKeys = excludeTags(ctx, tagFilters[0].BoolTagKeys)
+	}
+	return &tagFiltersResult, nil
+}
+
 func excludeTags(_ context.Context, tags []string) []string {
 	excludedTagsMap := map[string]bool{
 		"http.code": true,
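In GetTagFilters, groupUniqArrayArray(mapKeys(...)) folds the key arrays of every scanned row into one deduplicated array, which is why the code only ever reads tagFilters[0] from the result set. A small Go sketch of the aggregate's semantics; uniqFlatten is an illustrative name, not a ClickHouse or SigNoz API.

package main

import "fmt"

// uniqFlatten mimics ClickHouse's groupUniqArrayArray(mapKeys(...)): it folds
// the per-row key arrays into a single slice with duplicates removed, so the
// whole scan collapses to one row of distinct tag keys per map type.
func uniqFlatten(rows [][]string) []string {
	seen := map[string]struct{}{}
	out := []string{}
	for _, row := range rows {
		for _, k := range row {
			if _, ok := seen[k]; !ok {
				seen[k] = struct{}{}
				out = append(out, k)
			}
		}
	}
	return out
}

func main() {
	rows := [][]string{
		{"http.method", "http.route"},
		{"http.method", "peer.service"},
	}
	fmt.Println(uniqFlatten(rows)) // [http.method http.route peer.service]
}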
@@ -967,6 +1483,102 @@ func excludeTags(_ context.Context, tags []string) []string {
 	return newTags
 }
 
+func (r *ClickHouseReader) GetTagValues(ctx context.Context, queryParams *model.TagFilterParams) (*model.TagValues, *model.ApiError) {
+
+	if queryParams.TagKey.Type == model.TagTypeNumber {
+		return &model.TagValues{
+			NumberTagValues: make([]float64, 0),
+			StringTagValues: make([]string, 0),
+			BoolTagValues: make([]bool, 0),
+		}, nil
+	} else if queryParams.TagKey.Type == model.TagTypeBool {
+		return &model.TagValues{
+			NumberTagValues: make([]float64, 0),
+			StringTagValues: make([]string, 0),
+			BoolTagValues: []bool{true, false},
+		}, nil
+	}
+
+	excludeMap := make(map[string]struct{})
+	for _, e := range queryParams.Exclude {
+		if e == constants.OperationRequest {
+			excludeMap[constants.OperationDB] = struct{}{}
+			continue
+		}
+		excludeMap[e] = struct{}{}
+	}
+
+	var query string
+	args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))}
+	if len(queryParams.TraceID) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.TraceID, constants.TraceID, &query, args)
+	}
+	if len(queryParams.ServiceName) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.ServiceName, constants.ServiceName, &query, args)
+	}
+	if len(queryParams.HttpRoute) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpRoute, constants.HttpRoute, &query, args)
+	}
+	if len(queryParams.HttpHost) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpHost, constants.HttpHost, &query, args)
+	}
+	if len(queryParams.HttpMethod) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpMethod, constants.HttpMethod, &query, args)
+	}
+	if len(queryParams.HttpUrl) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpUrl, constants.HttpUrl, &query, args)
+	}
+	if len(queryParams.Operation) > 0 {
+		args = buildFilterArrayQuery(ctx, excludeMap, queryParams.Operation, constants.OperationDB, &query, args)
+	}
+	if len(queryParams.MinDuration) != 0 {
+		query = query + " AND durationNano >= @durationNanoMin"
+		args = append(args, clickhouse.Named("durationNanoMin", queryParams.MinDuration))
+	}
+	if len(queryParams.MaxDuration) != 0 {
+		query = query + " AND durationNano <= @durationNanoMax"
+		args = append(args, clickhouse.Named("durationNanoMax", queryParams.MaxDuration))
+	}
+	if len(queryParams.SpanKind) != 0 {
+		query = query + " AND kind = @kind"
+		args = append(args, clickhouse.Named("kind", queryParams.SpanKind))
+	}
+
+	query = getStatusFilters(query, queryParams.Status, excludeMap)
+
+	tagValues := []model.TagValues{}
+
+	finalQuery := fmt.Sprintf(`SELECT groupArray(DISTINCT stringTagMap[@key]) as stringTagValues FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU`, r.TraceDB, r.indexTable)
+	finalQuery += query
+	finalQuery += " LIMIT @limit"
+
+	args = append(args, clickhouse.Named("key", queryParams.TagKey.Key))
+	args = append(args, clickhouse.Named("limit", queryParams.Limit))
+	err := r.db.Select(ctx, &tagValues, finalQuery, args...)
+
+	zap.L().Info(query)
+
+	if err != nil {
+		zap.L().Error("Error in processing sql query", zap.Error(err))
+		return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query")}
+	}
+
+	cleanedTagValues := model.TagValues{
+		StringTagValues: []string{},
+		NumberTagValues: []float64{},
+		BoolTagValues: []bool{},
+	}
+	if len(tagValues) == 0 {
+		return &cleanedTagValues, nil
+	}
+	for _, e := range tagValues[0].StringTagValues {
+		if e != "" {
+			cleanedTagValues.StringTagValues = append(cleanedTagValues.StringTagValues, e)
+		}
+	}
+	return &cleanedTagValues, nil
+}
+
 func (r *ClickHouseReader) GetTopOperations(ctx context.Context, queryParams *model.GetTopOperationsParams) (*[]model.TopOperationsItem, *model.ApiError) {
 
 	namedArgs := []interface{}{
@@ -1211,6 +1823,185 @@ func (r *ClickHouseReader) GetDependencyGraph(ctx context.Context, queryParams *
|
|||||||
return &response, nil
|
return &response, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (r *ClickHouseReader) GetFilteredSpansAggregates(ctx context.Context, queryParams *model.GetFilteredSpanAggregatesParams) (*model.GetFilteredSpansAggregatesResponse, *model.ApiError) {
|
||||||
|
|
||||||
|
excludeMap := make(map[string]struct{})
|
||||||
|
for _, e := range queryParams.Exclude {
|
||||||
|
if e == constants.OperationRequest {
|
||||||
|
excludeMap[constants.OperationDB] = struct{}{}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
excludeMap[e] = struct{}{}
|
||||||
|
}
|
||||||
|
|
||||||
|
SpanAggregatesDBResponseItems := []model.SpanAggregatesDBResponseItem{}
|
||||||
|
|
||||||
|
aggregation_query := ""
|
||||||
|
if queryParams.Dimension == "duration" {
|
||||||
|
switch queryParams.AggregationOption {
|
||||||
|
case "p50":
|
||||||
|
aggregation_query = " quantile(0.50)(durationNano) as float64Value "
|
||||||
|
case "p95":
|
||||||
|
aggregation_query = " quantile(0.95)(durationNano) as float64Value "
|
||||||
|
case "p90":
|
||||||
|
aggregation_query = " quantile(0.90)(durationNano) as float64Value "
|
||||||
|
case "p99":
|
||||||
|
aggregation_query = " quantile(0.99)(durationNano) as float64Value "
|
||||||
|
case "max":
|
||||||
|
aggregation_query = " max(durationNano) as value "
|
||||||
|
case "min":
|
||||||
|
aggregation_query = " min(durationNano) as value "
|
||||||
|
case "avg":
|
||||||
|
aggregation_query = " avg(durationNano) as float64Value "
|
||||||
|
case "sum":
|
||||||
|
aggregation_query = " sum(durationNano) as value "
|
||||||
|
default:
|
||||||
|
return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("aggregate type: %s not supported", queryParams.AggregationOption)}
|
||||||
|
}
|
||||||
|
} else if queryParams.Dimension == "calls" {
|
||||||
|
aggregation_query = " count(*) as value "
|
||||||
|
}
|
||||||
|
|
||||||
|
args := []interface{}{clickhouse.Named("timestampL", strconv.FormatInt(queryParams.Start.UnixNano(), 10)), clickhouse.Named("timestampU", strconv.FormatInt(queryParams.End.UnixNano(), 10))}
|
||||||
|
|
||||||
|
var query string
|
||||||
|
var customStr []string
|
||||||
|
_, columnExists := constants.GroupByColMap[queryParams.GroupBy]
|
||||||
|
// Using %s for groupBy params as it can be a custom column and custom columns are not supported by clickhouse-go yet:
|
||||||
|
// issue link: https://github.com/ClickHouse/clickhouse-go/issues/870
|
||||||
|
if queryParams.GroupBy != "" && columnExists {
|
||||||
|
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, %s as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, queryParams.GroupBy, aggregation_query, r.TraceDB, r.indexTable)
|
||||||
|
args = append(args, clickhouse.Named("groupByVar", queryParams.GroupBy))
|
||||||
|
} else if queryParams.GroupBy != "" {
|
||||||
|
customStr = strings.Split(queryParams.GroupBy, ".(")
|
||||||
|
if len(customStr) < 2 {
|
||||||
|
return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("GroupBy: %s not supported", queryParams.GroupBy)}
|
||||||
|
}
|
||||||
|
if customStr[1] == string(model.TagTypeString)+")" {
|
||||||
|
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, stringTagMap['%s'] as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, customStr[0], aggregation_query, r.TraceDB, r.indexTable)
|
||||||
|
} else if customStr[1] == string(model.TagTypeNumber)+")" {
|
||||||
|
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, toString(numberTagMap['%s']) as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, customStr[0], aggregation_query, r.TraceDB, r.indexTable)
|
||||||
|
} else if customStr[1] == string(model.TagTypeBool)+")" {
|
||||||
|
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, toString(boolTagMap['%s']) as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, customStr[0], aggregation_query, r.TraceDB, r.indexTable)
|
||||||
|
} else {
|
||||||
|
// return error for unsupported group by
|
||||||
|
return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("GroupBy: %s not supported", queryParams.GroupBy)}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.TraceDB, r.indexTable)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(queryParams.TraceID) > 0 {
|
||||||
|
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.TraceID, constants.TraceID, &query, args)
|
||||||
|
}
|
||||||
|
if len(queryParams.ServiceName) > 0 {
|
||||||
|
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.ServiceName, constants.ServiceName, &query, args)
|
||||||
|
}
|
||||||
|
if len(queryParams.HttpRoute) > 0 {
|
||||||
|
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpRoute, constants.HttpRoute, &query, args)
|
||||||
|
}
|
||||||
|
if len(queryParams.HttpHost) > 0 {
|
||||||
|
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpHost, constants.HttpHost, &query, args)
|
||||||
|
}
|
||||||
|
if len(queryParams.HttpMethod) > 0 {
|
||||||
|
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpMethod, constants.HttpMethod, &query, args)
|
||||||
|
}
|
||||||
|
if len(queryParams.HttpUrl) > 0 {
|
||||||
|
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.HttpUrl, constants.HttpUrl, &query, args)
|
||||||
|
}
|
||||||
|
if len(queryParams.Operation) > 0 {
|
||||||
|
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.Operation, constants.OperationDB, &query, args)
|
||||||
|
}
|
||||||
|
if len(queryParams.RPCMethod) > 0 {
|
||||||
|
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.RPCMethod, constants.RPCMethod, &query, args)
|
||||||
|
}
|
||||||
|
if len(queryParams.ResponseStatusCode) > 0 {
|
||||||
|
args = buildFilterArrayQuery(ctx, excludeMap, queryParams.ResponseStatusCode, constants.ResponseStatusCode, &query, args)
|
||||||
|
}
|
||||||
|
if len(queryParams.MinDuration) != 0 {
|
||||||
|
query = query + " AND durationNano >= @durationNanoMin"
|
||||||
|
args = append(args, clickhouse.Named("durationNanoMin", queryParams.MinDuration))
|
||||||
|
}
|
||||||
|
if len(queryParams.MaxDuration) != 0 {
|
||||||
|
query = query + " AND durationNano <= @durationNanoMax"
|
||||||
|
args = append(args, clickhouse.Named("durationNanoMax", queryParams.MaxDuration))
|
||||||
|
}
|
||||||
|
query = getStatusFilters(query, queryParams.Status, excludeMap)
|
||||||
|
|
||||||
|
if len(queryParams.SpanKind) != 0 {
|
||||||
|
		query = query + " AND kind = @kind"
		args = append(args, clickhouse.Named("kind", queryParams.SpanKind))
	}

	// create TagQuery from TagQueryParams
	tags := createTagQueryFromTagQueryParams(queryParams.Tags)
	subQuery, argsSubQuery, errStatus := buildQueryWithTagParams(ctx, tags)
	query += subQuery
	args = append(args, argsSubQuery...)

	if errStatus != nil {
		return nil, errStatus
	}

	if queryParams.GroupBy != "" && columnExists {
		query = query + fmt.Sprintf(" GROUP BY time, %s as groupBy ORDER BY time", queryParams.GroupBy)
	} else if queryParams.GroupBy != "" {
		if customStr[1] == string(model.TagTypeString)+")" {
			query = query + fmt.Sprintf(" GROUP BY time, stringTagMap['%s'] as groupBy ORDER BY time", customStr[0])
		} else if customStr[1] == string(model.TagTypeNumber)+")" {
			query = query + fmt.Sprintf(" GROUP BY time, toString(numberTagMap['%s']) as groupBy ORDER BY time", customStr[0])
		} else if customStr[1] == string(model.TagTypeBool)+")" {
			query = query + fmt.Sprintf(" GROUP BY time, toString(boolTagMap['%s']) as groupBy ORDER BY time", customStr[0])
		}
	} else {
		query = query + " GROUP BY time ORDER BY time"
	}

	err := r.db.Select(ctx, &SpanAggregatesDBResponseItems, query, args...)

	zap.L().Info(query)

	if err != nil {
		zap.L().Error("Error in processing sql query", zap.Error(err))
		return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query")}
	}

	GetFilteredSpansAggregatesResponse := model.GetFilteredSpansAggregatesResponse{
		Items: map[int64]model.SpanAggregatesResponseItem{},
	}

	for i := range SpanAggregatesDBResponseItems {
		if SpanAggregatesDBResponseItems[i].Value == 0 {
			SpanAggregatesDBResponseItems[i].Value = uint64(SpanAggregatesDBResponseItems[i].Float64Value)
		}
		SpanAggregatesDBResponseItems[i].Timestamp = int64(SpanAggregatesDBResponseItems[i].Time.UnixNano())
		SpanAggregatesDBResponseItems[i].FloatValue = float32(SpanAggregatesDBResponseItems[i].Value)
		if queryParams.AggregationOption == "rate_per_sec" {
			SpanAggregatesDBResponseItems[i].FloatValue = float32(SpanAggregatesDBResponseItems[i].Value) / float32(queryParams.StepSeconds)
		}
		if responseElement, ok := GetFilteredSpansAggregatesResponse.Items[SpanAggregatesDBResponseItems[i].Timestamp]; !ok {
			if queryParams.GroupBy != "" && SpanAggregatesDBResponseItems[i].GroupBy != "" {
				GetFilteredSpansAggregatesResponse.Items[SpanAggregatesDBResponseItems[i].Timestamp] = model.SpanAggregatesResponseItem{
					Timestamp: SpanAggregatesDBResponseItems[i].Timestamp,
					GroupBy:   map[string]float32{SpanAggregatesDBResponseItems[i].GroupBy: SpanAggregatesDBResponseItems[i].FloatValue},
				}
			} else if queryParams.GroupBy == "" {
				GetFilteredSpansAggregatesResponse.Items[SpanAggregatesDBResponseItems[i].Timestamp] = model.SpanAggregatesResponseItem{
					Timestamp: SpanAggregatesDBResponseItems[i].Timestamp,
					Value:     SpanAggregatesDBResponseItems[i].FloatValue,
				}
			}
		} else {
			if queryParams.GroupBy != "" && SpanAggregatesDBResponseItems[i].GroupBy != "" {
				responseElement.GroupBy[SpanAggregatesDBResponseItems[i].GroupBy] = SpanAggregatesDBResponseItems[i].FloatValue
			}
			GetFilteredSpansAggregatesResponse.Items[SpanAggregatesDBResponseItems[i].Timestamp] = responseElement
		}
	}

	return &GetFilteredSpansAggregatesResponse, nil
}
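To make the "rate_per_sec" branch above concrete, here is the same arithmetic in isolation (the numbers are illustrative only, not taken from the source):

	// A 60 s step bucket that counted 300 matching spans
	// yields 300 / 60 = 5.0 spans per second.
	value := uint64(300)
	stepSeconds := uint64(60)
	ratePerSec := float32(value) / float32(stepSeconds) // 5.0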
func getLocalTableName(tableName string) string {
	tableNameSplit := strings.Split(tableName, ".")
@@ -119,11 +119,6 @@ type APIHandler struct {
 	nodesRepo        *inframetrics.NodesRepo
 	namespacesRepo   *inframetrics.NamespacesRepo
 	clustersRepo     *inframetrics.ClustersRepo
-	// workloads
-	deploymentsRepo  *inframetrics.DeploymentsRepo
-	daemonsetsRepo   *inframetrics.DaemonSetsRepo
-	statefulsetsRepo *inframetrics.StatefulSetsRepo
-	jobsRepo         *inframetrics.JobsRepo
 }

 type APIHandlerOpts struct {

@@ -202,10 +197,6 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
 	nodesRepo := inframetrics.NewNodesRepo(opts.Reader, querierv2)
 	namespacesRepo := inframetrics.NewNamespacesRepo(opts.Reader, querierv2)
 	clustersRepo := inframetrics.NewClustersRepo(opts.Reader, querierv2)
-	deploymentsRepo := inframetrics.NewDeploymentsRepo(opts.Reader, querierv2)
-	daemonsetsRepo := inframetrics.NewDaemonSetsRepo(opts.Reader, querierv2)
-	statefulsetsRepo := inframetrics.NewStatefulSetsRepo(opts.Reader, querierv2)
-	jobsRepo := inframetrics.NewJobsRepo(opts.Reader, querierv2)

 	aH := &APIHandler{
 		reader: opts.Reader,

@@ -231,10 +222,6 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
 		nodesRepo:        nodesRepo,
 		namespacesRepo:   namespacesRepo,
 		clustersRepo:     clustersRepo,
-		deploymentsRepo:  deploymentsRepo,
-		daemonsetsRepo:   daemonsetsRepo,
-		statefulsetsRepo: statefulsetsRepo,
-		jobsRepo:         jobsRepo,
 	}

 	logsQueryBuilder := logsv3.PrepareLogsQuery
@@ -332,8 +319,6 @@ func RespondError(w http.ResponseWriter, apiErr model.BaseApiError, data interface{}) {
 		code = http.StatusUnauthorized
 	case model.ErrorForbidden:
 		code = http.StatusForbidden
-	case model.ErrorConflict:
-		code = http.StatusConflict
 	default:
 		code = http.StatusInternalServerError
 	}
@@ -415,26 +400,6 @@ func (aH *APIHandler) RegisterInfraMetricsRoutes(router *mux.Router, am *AuthMiddleware) {
 	clustersSubRouter.HandleFunc("/attribute_keys", am.ViewAccess(aH.getClusterAttributeKeys)).Methods(http.MethodGet)
 	clustersSubRouter.HandleFunc("/attribute_values", am.ViewAccess(aH.getClusterAttributeValues)).Methods(http.MethodGet)
 	clustersSubRouter.HandleFunc("/list", am.ViewAccess(aH.getClusterList)).Methods(http.MethodPost)
-
-	deploymentsSubRouter := router.PathPrefix("/api/v1/deployments").Subrouter()
-	deploymentsSubRouter.HandleFunc("/attribute_keys", am.ViewAccess(aH.getDeploymentAttributeKeys)).Methods(http.MethodGet)
-	deploymentsSubRouter.HandleFunc("/attribute_values", am.ViewAccess(aH.getDeploymentAttributeValues)).Methods(http.MethodGet)
-	deploymentsSubRouter.HandleFunc("/list", am.ViewAccess(aH.getDeploymentList)).Methods(http.MethodPost)
-
-	daemonsetsSubRouter := router.PathPrefix("/api/v1/daemonsets").Subrouter()
-	daemonsetsSubRouter.HandleFunc("/attribute_keys", am.ViewAccess(aH.getDaemonSetAttributeKeys)).Methods(http.MethodGet)
-	daemonsetsSubRouter.HandleFunc("/attribute_values", am.ViewAccess(aH.getDaemonSetAttributeValues)).Methods(http.MethodGet)
-	daemonsetsSubRouter.HandleFunc("/list", am.ViewAccess(aH.getDaemonSetList)).Methods(http.MethodPost)
-
-	statefulsetsSubRouter := router.PathPrefix("/api/v1/statefulsets").Subrouter()
-	statefulsetsSubRouter.HandleFunc("/attribute_keys", am.ViewAccess(aH.getStatefulSetAttributeKeys)).Methods(http.MethodGet)
-	statefulsetsSubRouter.HandleFunc("/attribute_values", am.ViewAccess(aH.getStatefulSetAttributeValues)).Methods(http.MethodGet)
-	statefulsetsSubRouter.HandleFunc("/list", am.ViewAccess(aH.getStatefulSetList)).Methods(http.MethodPost)
-
-	jobsSubRouter := router.PathPrefix("/api/v1/jobs").Subrouter()
-	jobsSubRouter.HandleFunc("/attribute_keys", am.ViewAccess(aH.getJobAttributeKeys)).Methods(http.MethodGet)
-	jobsSubRouter.HandleFunc("/attribute_values", am.ViewAccess(aH.getJobAttributeValues)).Methods(http.MethodGet)
-	jobsSubRouter.HandleFunc("/list", am.ViewAccess(aH.getJobList)).Methods(http.MethodPost)
 }

 func (aH *APIHandler) RegisterWebSocketPaths(router *mux.Router, am *AuthMiddleware) {
@@ -526,6 +491,12 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *AuthMiddleware) {
 	router.HandleFunc("/api/v1/configs", am.OpenAccess(aH.getConfigs)).Methods(http.MethodGet)
 	router.HandleFunc("/api/v1/health", am.OpenAccess(aH.getHealth)).Methods(http.MethodGet)

+	router.HandleFunc("/api/v1/getSpanFilters", am.ViewAccess(aH.getSpanFilters)).Methods(http.MethodPost)
+	router.HandleFunc("/api/v1/getTagFilters", am.ViewAccess(aH.getTagFilters)).Methods(http.MethodPost)
+	router.HandleFunc("/api/v1/getFilteredSpans", am.ViewAccess(aH.getFilteredSpans)).Methods(http.MethodPost)
+	router.HandleFunc("/api/v1/getFilteredSpans/aggregates", am.ViewAccess(aH.getFilteredSpanAggregates)).Methods(http.MethodPost)
+	router.HandleFunc("/api/v1/getTagValues", am.ViewAccess(aH.getTagValues)).Methods(http.MethodPost)
+
 	router.HandleFunc("/api/v1/listErrors", am.ViewAccess(aH.listErrors)).Methods(http.MethodPost)
 	router.HandleFunc("/api/v1/countErrors", am.ViewAccess(aH.countErrors)).Methods(http.MethodPost)
 	router.HandleFunc("/api/v1/errorFromErrorID", am.ViewAccess(aH.getErrorFromErrorID)).Methods(http.MethodGet)
@@ -1841,6 +1812,86 @@ func (aH *APIHandler) getErrorFromGroupID(w http.ResponseWriter, r *http.Request) {
 	aH.WriteJSON(w, r, result)
 }

+func (aH *APIHandler) getSpanFilters(w http.ResponseWriter, r *http.Request) {
+
+	query, err := parseSpanFilterRequestBody(r)
+	if aH.HandleError(w, err, http.StatusBadRequest) {
+		return
+	}
+
+	result, apiErr := aH.reader.GetSpanFilters(r.Context(), query)
+
+	if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
+		return
+	}
+
+	aH.WriteJSON(w, r, result)
+}
+
+func (aH *APIHandler) getFilteredSpans(w http.ResponseWriter, r *http.Request) {
+
+	query, err := parseFilteredSpansRequest(r, aH)
+	if aH.HandleError(w, err, http.StatusBadRequest) {
+		return
+	}
+
+	result, apiErr := aH.reader.GetFilteredSpans(r.Context(), query)
+
+	if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
+		return
+	}
+
+	aH.WriteJSON(w, r, result)
+}
+
+func (aH *APIHandler) getFilteredSpanAggregates(w http.ResponseWriter, r *http.Request) {
+
+	query, err := parseFilteredSpanAggregatesRequest(r)
+	if aH.HandleError(w, err, http.StatusBadRequest) {
+		return
+	}
+
+	result, apiErr := aH.reader.GetFilteredSpansAggregates(r.Context(), query)
+
+	if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
+		return
+	}
+
+	aH.WriteJSON(w, r, result)
+}
+
+func (aH *APIHandler) getTagFilters(w http.ResponseWriter, r *http.Request) {
+
+	query, err := parseTagFilterRequest(r)
+	if aH.HandleError(w, err, http.StatusBadRequest) {
+		return
+	}
+
+	result, apiErr := aH.reader.GetTagFilters(r.Context(), query)
+
+	if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
+		return
+	}
+
+	aH.WriteJSON(w, r, result)
+}
+
+func (aH *APIHandler) getTagValues(w http.ResponseWriter, r *http.Request) {
+
+	query, err := parseTagValueRequest(r)
+	if aH.HandleError(w, err, http.StatusBadRequest) {
+		return
+	}
+
+	result, apiErr := aH.reader.GetTagValues(r.Context(), query)
+
+	if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
+		return
+	}
+
+	aH.WriteJSON(w, r, result)
+}
+
 func (aH *APIHandler) setTTL(w http.ResponseWriter, r *http.Request) {
 	ttlParams, err := parseTTLParams(r)
 	if aH.HandleError(w, err, http.StatusBadRequest) {
@@ -334,213 +334,3 @@ func (aH *APIHandler) getClusterList(w http.ResponseWriter, r *http.Request) {

 	aH.Respond(w, clusterList)
 }
-
-func (aH *APIHandler) getDeploymentAttributeKeys(w http.ResponseWriter, r *http.Request) {
-	ctx := r.Context()
-	req, err := parseFilterAttributeKeyRequest(r)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-
-	keys, err := aH.deploymentsRepo.GetDeploymentAttributeKeys(ctx, *req)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-
-	aH.Respond(w, keys)
-}
-
-func (aH *APIHandler) getDeploymentAttributeValues(w http.ResponseWriter, r *http.Request) {
-	ctx := r.Context()
-	req, err := parseFilterAttributeValueRequest(r)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-
-	values, err := aH.deploymentsRepo.GetDeploymentAttributeValues(ctx, *req)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-
-	aH.Respond(w, values)
-}
-
-func (aH *APIHandler) getDeploymentList(w http.ResponseWriter, r *http.Request) {
-	ctx := r.Context()
-	req := model.DeploymentListRequest{}
-
-	err := json.NewDecoder(r.Body).Decode(&req)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-
-	deploymentList, err := aH.deploymentsRepo.GetDeploymentList(ctx, req)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-
-	aH.Respond(w, deploymentList)
-}
-
-func (aH *APIHandler) getDaemonSetAttributeKeys(w http.ResponseWriter, r *http.Request) {
-	ctx := r.Context()
-	req, err := parseFilterAttributeKeyRequest(r)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-
-	keys, err := aH.daemonsetsRepo.GetDaemonSetAttributeKeys(ctx, *req)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-
-	aH.Respond(w, keys)
-}
-
-func (aH *APIHandler) getDaemonSetAttributeValues(w http.ResponseWriter, r *http.Request) {
-	ctx := r.Context()
-	req, err := parseFilterAttributeValueRequest(r)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-
-	values, err := aH.daemonsetsRepo.GetDaemonSetAttributeValues(ctx, *req)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-
-	aH.Respond(w, values)
-}
-
-func (aH *APIHandler) getDaemonSetList(w http.ResponseWriter, r *http.Request) {
-	ctx := r.Context()
-	req := model.DaemonSetListRequest{}
-
-	err := json.NewDecoder(r.Body).Decode(&req)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-
-	daemonSetList, err := aH.daemonsetsRepo.GetDaemonSetList(ctx, req)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-
-	aH.Respond(w, daemonSetList)
-}
-
-func (aH *APIHandler) getStatefulSetAttributeKeys(w http.ResponseWriter, r *http.Request) {
-	ctx := r.Context()
-	req, err := parseFilterAttributeKeyRequest(r)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-
-	keys, err := aH.statefulsetsRepo.GetStatefulSetAttributeKeys(ctx, *req)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-
-	aH.Respond(w, keys)
-}
-
-func (aH *APIHandler) getStatefulSetAttributeValues(w http.ResponseWriter, r *http.Request) {
-	ctx := r.Context()
-	req, err := parseFilterAttributeValueRequest(r)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-
-	values, err := aH.statefulsetsRepo.GetStatefulSetAttributeValues(ctx, *req)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-
-	aH.Respond(w, values)
-}
-
-func (aH *APIHandler) getStatefulSetList(w http.ResponseWriter, r *http.Request) {
-	ctx := r.Context()
-	req := model.StatefulSetListRequest{}
-
-	err := json.NewDecoder(r.Body).Decode(&req)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-
-	statefulSetList, err := aH.statefulsetsRepo.GetStatefulSetList(ctx, req)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-
-	aH.Respond(w, statefulSetList)
-}
-
-func (aH *APIHandler) getJobAttributeKeys(w http.ResponseWriter, r *http.Request) {
-	ctx := r.Context()
-	req, err := parseFilterAttributeKeyRequest(r)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-
-	keys, err := aH.jobsRepo.GetJobAttributeKeys(ctx, *req)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-	aH.Respond(w, keys)
-}
-
-func (aH *APIHandler) getJobAttributeValues(w http.ResponseWriter, r *http.Request) {
-	ctx := r.Context()
-	req, err := parseFilterAttributeValueRequest(r)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-
-	values, err := aH.jobsRepo.GetJobAttributeValues(ctx, *req)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-	aH.Respond(w, values)
-}
-
-func (aH *APIHandler) getJobList(w http.ResponseWriter, r *http.Request) {
-	ctx := r.Context()
-	req := model.JobListRequest{}
-
-	err := json.NewDecoder(r.Body).Decode(&req)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-
-	jobList, err := aH.jobsRepo.GetJobList(ctx, req)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
-		return
-	}
-
-	aH.Respond(w, jobList)
-}
@@ -73,22 +73,6 @@ func getParamsForTopClusters(req model.ClusterListRequest) (int64, string, string) {
 	return getParamsForTopItems(req.Start, req.End)
 }
-
-func getParamsForTopDeployments(req model.DeploymentListRequest) (int64, string, string) {
-	return getParamsForTopItems(req.Start, req.End)
-}
-
-func getParamsForTopDaemonSets(req model.DaemonSetListRequest) (int64, string, string) {
-	return getParamsForTopItems(req.Start, req.End)
-}
-
-func getParamsForTopStatefulSets(req model.StatefulSetListRequest) (int64, string, string) {
-	return getParamsForTopItems(req.Start, req.End)
-}
-
-func getParamsForTopJobs(req model.JobListRequest) (int64, string, string) {
-	return getParamsForTopItems(req.Start, req.End)
-}
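All of these thin wrappers delegate to getParamsForTopItems, which is not part of this compare. A minimal sketch of what such a helper could plausibly look like, assuming it widens the step and switches to downsampled tables for larger windows; the table names and threshold below are assumptions, not taken from this repository:

	func getParamsForTopItems(start, end int64) (int64, string, string) {
		// start/end are assumed to be epoch milliseconds; pick a coarser
		// step and downsampled tables as the requested window grows.
		sixHoursMs := int64(6 * 60 * 60 * 1000)
		if end-start <= sixHoursMs {
			return 60, "time_series_v4", "samples_v4" // assumed table names
		}
		return 300, "time_series_v4_6hrs", "samples_v4_agg_5m" // assumed table names
	}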

 // TODO(srikanthccv): remove this
 // What is happening here?
 // The `PrepareTimeseriesFilterQuery` uses the local time series table for sub-query because each fingerprint
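Elsewhere in this compare, getMetadataAttributes pipes the result of PrepareTimeseriesFilterQuery through localQueryToDistributedQuery. That helper is not shown here; a minimal sketch of what it is assumed to do, with the table names being assumptions rather than confirmed by this diff:

	func localQueryToDistributedQuery(query string) string {
		// Rewrite the shard-local time series table reference to its
		// Distributed counterpart so the lookup sees every shard's data.
		return strings.Replace(query, ".time_series_v4", ".distributed_time_series_v4", 1)
	}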
@@ -1,444 +0,0 @@
-package inframetrics
-
-import (
-	"context"
-	"math"
-	"sort"
-
-	"go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers"
-	"go.signoz.io/signoz/pkg/query-service/common"
-	"go.signoz.io/signoz/pkg/query-service/interfaces"
-	"go.signoz.io/signoz/pkg/query-service/model"
-	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
-	"go.signoz.io/signoz/pkg/query-service/postprocess"
-	"golang.org/x/exp/slices"
-)
-
-var (
-	metricToUseForDaemonSets = "k8s_pod_cpu_utilization"
-	k8sDaemonSetNameAttrKey  = "k8s_daemonset_name"
-
-	metricNamesForDaemonSets = map[string]string{
-		"desired_nodes":   "k8s_daemonset_desired_scheduled_nodes",
-		"available_nodes": "k8s_daemonset_current_scheduled_nodes",
-	}
-
-	daemonSetAttrsToEnrich = []string{
-		"k8s_daemonset_name",
-		"k8s_namespace_name",
-		"k8s_cluster_name",
-	}
-
-	queryNamesForDaemonSets = map[string][]string{
-		"cpu":             {"A"},
-		"cpu_request":     {"B", "A"},
-		"cpu_limit":       {"C", "A"},
-		"memory":          {"D"},
-		"memory_request":  {"E", "D"},
-		"memory_limit":    {"F", "D"},
-		"restarts":        {"G", "A"},
-		"desired_nodes":   {"H"},
-		"available_nodes": {"I"},
-	}
-
-	builderQueriesForDaemonSets = map[string]*v3.BuilderQuery{
-		// desired nodes
-		"H": {
-			QueryName:  "H",
-			DataSource: v3.DataSourceMetrics,
-			AggregateAttribute: v3.AttributeKey{
-				Key:      metricNamesForDaemonSets["desired_nodes"],
-				DataType: v3.AttributeKeyDataTypeFloat64,
-			},
-			Temporality: v3.Unspecified,
-			Filters: &v3.FilterSet{
-				Operator: "AND",
-				Items:    []v3.FilterItem{},
-			},
-			GroupBy:          []v3.AttributeKey{},
-			Expression:       "H",
-			ReduceTo:         v3.ReduceToOperatorLast,
-			TimeAggregation:  v3.TimeAggregationAnyLast,
-			SpaceAggregation: v3.SpaceAggregationSum,
-			Disabled:         false,
-		},
-		// available nodes
-		"I": {
-			QueryName:  "I",
-			DataSource: v3.DataSourceMetrics,
-			AggregateAttribute: v3.AttributeKey{
-				Key:      metricNamesForDaemonSets["available_nodes"],
-				DataType: v3.AttributeKeyDataTypeFloat64,
-			},
-			Temporality: v3.Unspecified,
-			Filters: &v3.FilterSet{
-				Operator: "AND",
-				Items:    []v3.FilterItem{},
-			},
-			GroupBy:          []v3.AttributeKey{},
-			Expression:       "I",
-			ReduceTo:         v3.ReduceToOperatorLast,
-			TimeAggregation:  v3.TimeAggregationAnyLast,
-			SpaceAggregation: v3.SpaceAggregationSum,
-			Disabled:         false,
-		},
-	}
-
-	daemonSetQueryNames = []string{"A", "B", "C", "D", "E", "F", "G", "H", "I"}
-)
-
-type DaemonSetsRepo struct {
-	reader    interfaces.Reader
-	querierV2 interfaces.Querier
-}
-
-func NewDaemonSetsRepo(reader interfaces.Reader, querierV2 interfaces.Querier) *DaemonSetsRepo {
-	return &DaemonSetsRepo{reader: reader, querierV2: querierV2}
-}
-
-func (d *DaemonSetsRepo) GetDaemonSetAttributeKeys(ctx context.Context, req v3.FilterAttributeKeyRequest) (*v3.FilterAttributeKeyResponse, error) {
-	// TODO(srikanthccv): remove hardcoded metric name and support keys from any pod metric
-	req.DataSource = v3.DataSourceMetrics
-	req.AggregateAttribute = metricToUseForDaemonSets
-	if req.Limit == 0 {
-		req.Limit = 50
-	}
-
-	attributeKeysResponse, err := d.reader.GetMetricAttributeKeys(ctx, &req)
-	if err != nil {
-		return nil, err
-	}
-
-	// TODO(srikanthccv): only return resource attributes when we have a way to
-	// distinguish between resource attributes and other attributes.
-	filteredKeys := []v3.AttributeKey{}
-	for _, key := range attributeKeysResponse.AttributeKeys {
-		if slices.Contains(pointAttrsToIgnore, key.Key) {
-			continue
-		}
-		filteredKeys = append(filteredKeys, key)
-	}
-
-	return &v3.FilterAttributeKeyResponse{AttributeKeys: filteredKeys}, nil
-}
-
-func (d *DaemonSetsRepo) GetDaemonSetAttributeValues(ctx context.Context, req v3.FilterAttributeValueRequest) (*v3.FilterAttributeValueResponse, error) {
-	req.DataSource = v3.DataSourceMetrics
-	req.AggregateAttribute = metricToUseForDaemonSets
-	if req.Limit == 0 {
-		req.Limit = 50
-	}
-
-	attributeValuesResponse, err := d.reader.GetMetricAttributeValues(ctx, &req)
-	if err != nil {
-		return nil, err
-	}
-
-	return attributeValuesResponse, nil
-}
-
-func (d *DaemonSetsRepo) getMetadataAttributes(ctx context.Context, req model.DaemonSetListRequest) (map[string]map[string]string, error) {
-	daemonSetAttrs := map[string]map[string]string{}
-
-	for _, key := range daemonSetAttrsToEnrich {
-		hasKey := false
-		for _, groupByKey := range req.GroupBy {
-			if groupByKey.Key == key {
-				hasKey = true
-				break
-			}
-		}
-		if !hasKey {
-			req.GroupBy = append(req.GroupBy, v3.AttributeKey{Key: key})
-		}
-	}
-
-	mq := v3.BuilderQuery{
-		DataSource: v3.DataSourceMetrics,
-		AggregateAttribute: v3.AttributeKey{
-			Key:      metricToUseForDaemonSets,
-			DataType: v3.AttributeKeyDataTypeFloat64,
-		},
-		Temporality: v3.Unspecified,
-		GroupBy:     req.GroupBy,
-	}
-
-	query, err := helpers.PrepareTimeseriesFilterQuery(req.Start, req.End, &mq)
-	if err != nil {
-		return nil, err
-	}
-
-	query = localQueryToDistributedQuery(query)
-
-	attrsListResponse, err := d.reader.GetListResultV3(ctx, query)
-	if err != nil {
-		return nil, err
-	}
-
-	for _, row := range attrsListResponse {
-		stringData := map[string]string{}
-		for key, value := range row.Data {
-			if str, ok := value.(string); ok {
-				stringData[key] = str
-			} else if strPtr, ok := value.(*string); ok {
-				stringData[key] = *strPtr
-			}
-		}
-
-		daemonSetName := stringData[k8sDaemonSetNameAttrKey]
-		if _, ok := daemonSetAttrs[daemonSetName]; !ok {
-			daemonSetAttrs[daemonSetName] = map[string]string{}
-		}
-
-		for _, key := range req.GroupBy {
-			daemonSetAttrs[daemonSetName][key.Key] = stringData[key.Key]
-		}
-	}
-
-	return daemonSetAttrs, nil
-}
-
-func (d *DaemonSetsRepo) getTopDaemonSetGroups(ctx context.Context, req model.DaemonSetListRequest, q *v3.QueryRangeParamsV3) ([]map[string]string, []map[string]string, error) {
-	step, timeSeriesTableName, samplesTableName := getParamsForTopDaemonSets(req)
-
-	queryNames := queryNamesForDaemonSets[req.OrderBy.ColumnName]
-	topDaemonSetGroupsQueryRangeParams := &v3.QueryRangeParamsV3{
-		Start: req.Start,
-		End:   req.End,
-		Step:  step,
-		CompositeQuery: &v3.CompositeQuery{
-			BuilderQueries: map[string]*v3.BuilderQuery{},
-			QueryType:      v3.QueryTypeBuilder,
-			PanelType:      v3.PanelTypeTable,
-		},
-	}
-
-	for _, queryName := range queryNames {
-		query := q.CompositeQuery.BuilderQueries[queryName].Clone()
-		query.StepInterval = step
-		query.MetricTableHints = &v3.MetricTableHints{
-			TimeSeriesTableName: timeSeriesTableName,
-			SamplesTableName:    samplesTableName,
-		}
-		if req.Filters != nil && len(req.Filters.Items) > 0 {
-			if query.Filters == nil {
-				query.Filters = &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{}}
-			}
-			query.Filters.Items = append(query.Filters.Items, req.Filters.Items...)
-		}
-		topDaemonSetGroupsQueryRangeParams.CompositeQuery.BuilderQueries[queryName] = query
-	}
-
-	queryResponse, _, err := d.querierV2.QueryRange(ctx, topDaemonSetGroupsQueryRangeParams)
-	if err != nil {
-		return nil, nil, err
-	}
-	formattedResponse, err := postprocess.PostProcessResult(queryResponse, topDaemonSetGroupsQueryRangeParams)
-	if err != nil {
-		return nil, nil, err
-	}
-
-	if len(formattedResponse) == 0 || len(formattedResponse[0].Series) == 0 {
-		return nil, nil, nil
-	}
-
-	if req.OrderBy.Order == v3.DirectionDesc {
-		sort.Slice(formattedResponse[0].Series, func(i, j int) bool {
-			return formattedResponse[0].Series[i].Points[0].Value > formattedResponse[0].Series[j].Points[0].Value
-		})
-	} else {
-		sort.Slice(formattedResponse[0].Series, func(i, j int) bool {
-			return formattedResponse[0].Series[i].Points[0].Value < formattedResponse[0].Series[j].Points[0].Value
-		})
-	}
-
-	limit := math.Min(float64(req.Offset+req.Limit), float64(len(formattedResponse[0].Series)))
-
-	paginatedTopDaemonSetGroupsSeries := formattedResponse[0].Series[req.Offset:int(limit)]
-
-	topDaemonSetGroups := []map[string]string{}
-	for _, series := range paginatedTopDaemonSetGroupsSeries {
-		topDaemonSetGroups = append(topDaemonSetGroups, series.Labels)
-	}
-	allDaemonSetGroups := []map[string]string{}
-	for _, series := range formattedResponse[0].Series {
-		allDaemonSetGroups = append(allDaemonSetGroups, series.Labels)
-	}
-
-	return topDaemonSetGroups, allDaemonSetGroups, nil
-}
-
-func (d *DaemonSetsRepo) GetDaemonSetList(ctx context.Context, req model.DaemonSetListRequest) (model.DaemonSetListResponse, error) {
-	resp := model.DaemonSetListResponse{}
-
-	if req.Limit == 0 {
-		req.Limit = 10
-	}
-
-	if req.OrderBy == nil {
-		req.OrderBy = &v3.OrderBy{ColumnName: "cpu", Order: v3.DirectionDesc}
-	}
-
-	if req.GroupBy == nil {
-		req.GroupBy = []v3.AttributeKey{{Key: k8sDaemonSetNameAttrKey}}
-		resp.Type = model.ResponseTypeList
-	} else {
-		resp.Type = model.ResponseTypeGroupedList
-	}
-
-	step := int64(math.Max(float64(common.MinAllowedStepInterval(req.Start, req.End)), 60))
-
-	query := WorkloadTableListQuery.Clone()
-
-	query.Start = req.Start
-	query.End = req.End
-	query.Step = step
-
-	// add additional queries for daemon sets
-	for _, daemonSetQuery := range builderQueriesForDaemonSets {
-		query.CompositeQuery.BuilderQueries[daemonSetQuery.QueryName] = daemonSetQuery
-	}
-
-	for _, query := range query.CompositeQuery.BuilderQueries {
-		query.StepInterval = step
-		if req.Filters != nil && len(req.Filters.Items) > 0 {
-			if query.Filters == nil {
-				query.Filters = &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{}}
-			}
-			query.Filters.Items = append(query.Filters.Items, req.Filters.Items...)
-		}
-		query.GroupBy = req.GroupBy
-		// make sure we only get records for daemon sets
-		query.Filters.Items = append(query.Filters.Items, v3.FilterItem{
-			Key:      v3.AttributeKey{Key: k8sDaemonSetNameAttrKey},
-			Operator: v3.FilterOperatorExists,
-		})
-	}
-
-	daemonSetAttrs, err := d.getMetadataAttributes(ctx, req)
-	if err != nil {
-		return resp, err
-	}
-
-	topDaemonSetGroups, allDaemonSetGroups, err := d.getTopDaemonSetGroups(ctx, req, query)
-	if err != nil {
-		return resp, err
-	}
-
-	groupFilters := map[string][]string{}
-	for _, topDaemonSetGroup := range topDaemonSetGroups {
-		for k, v := range topDaemonSetGroup {
-			groupFilters[k] = append(groupFilters[k], v)
-		}
-	}
-
-	for groupKey, groupValues := range groupFilters {
-		hasGroupFilter := false
-		if req.Filters != nil && len(req.Filters.Items) > 0 {
-			for _, filter := range req.Filters.Items {
-				if filter.Key.Key == groupKey {
-					hasGroupFilter = true
-					break
-				}
-			}
-		}
-
-		if !hasGroupFilter {
-			for _, query := range query.CompositeQuery.BuilderQueries {
-				query.Filters.Items = append(query.Filters.Items, v3.FilterItem{
-					Key:      v3.AttributeKey{Key: groupKey},
-					Value:    groupValues,
-					Operator: v3.FilterOperatorIn,
-				})
-			}
-		}
-	}
-
-	queryResponse, _, err := d.querierV2.QueryRange(ctx, query)
-	if err != nil {
-		return resp, err
-	}
-
-	formattedResponse, err := postprocess.PostProcessResult(queryResponse, query)
-	if err != nil {
-		return resp, err
-	}
-
-	records := []model.DaemonSetListRecord{}
-
-	for _, result := range formattedResponse {
-		for _, row := range result.Table.Rows {
-
-			record := model.DaemonSetListRecord{
-				DaemonSetName:  "",
-				CPUUsage:       -1,
-				CPURequest:     -1,
-				CPULimit:       -1,
-				MemoryUsage:    -1,
-				MemoryRequest:  -1,
-				MemoryLimit:    -1,
-				DesiredNodes:   -1,
-				AvailableNodes: -1,
-			}
-
-			if daemonSetName, ok := row.Data[k8sDaemonSetNameAttrKey].(string); ok {
-				record.DaemonSetName = daemonSetName
-			}
-
-			if cpu, ok := row.Data["A"].(float64); ok {
-				record.CPUUsage = cpu
-			}
-			if cpuRequest, ok := row.Data["B"].(float64); ok {
-				record.CPURequest = cpuRequest
-			}
-
-			if cpuLimit, ok := row.Data["C"].(float64); ok {
-				record.CPULimit = cpuLimit
-			}
-
-			if memory, ok := row.Data["D"].(float64); ok {
-				record.MemoryUsage = memory
-			}
-
-			if memoryRequest, ok := row.Data["E"].(float64); ok {
-				record.MemoryRequest = memoryRequest
-			}
-
-			if memoryLimit, ok := row.Data["F"].(float64); ok {
-				record.MemoryLimit = memoryLimit
-			}
-
-			if restarts, ok := row.Data["G"].(float64); ok {
-				record.Restarts = int(restarts)
-			}
-
-			if desiredNodes, ok := row.Data["H"].(float64); ok {
-				record.DesiredNodes = int(desiredNodes)
-			}
-
-			if availableNodes, ok := row.Data["I"].(float64); ok {
-				record.AvailableNodes = int(availableNodes)
-			}
-
-			record.Meta = map[string]string{}
-			if _, ok := daemonSetAttrs[record.DaemonSetName]; ok {
-				record.Meta = daemonSetAttrs[record.DaemonSetName]
-			}
-
-			for k, v := range row.Data {
-				if slices.Contains(daemonSetQueryNames, k) {
-					continue
-				}
-				if labelValue, ok := v.(string); ok {
-					record.Meta[k] = labelValue
-				}
-			}
-
-			records = append(records, record)
-		}
-	}
-	resp.Total = len(allDaemonSetGroups)
-	resp.Records = records
-
-	return resp, nil
-}
@@ -1,444 +0,0 @@
-package inframetrics
-
-import (
-	"context"
-	"math"
-	"sort"
-
-	"go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers"
-	"go.signoz.io/signoz/pkg/query-service/common"
-	"go.signoz.io/signoz/pkg/query-service/interfaces"
-	"go.signoz.io/signoz/pkg/query-service/model"
-	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
-	"go.signoz.io/signoz/pkg/query-service/postprocess"
-	"golang.org/x/exp/slices"
-)
-
-var (
-	metricToUseForDeployments = "k8s_pod_cpu_utilization"
-	k8sDeploymentNameAttrKey  = "k8s_deployment_name"
-
-	metricNamesForDeployments = map[string]string{
-		"desired_pods":   "k8s_deployment_desired",
-		"available_pods": "k8s_deployment_available",
-	}
-
-	deploymentAttrsToEnrich = []string{
-		"k8s_deployment_name",
-		"k8s_namespace_name",
-		"k8s_cluster_name",
-	}
-
-	queryNamesForDeployments = map[string][]string{
-		"cpu":            {"A"},
-		"cpu_request":    {"B", "A"},
-		"cpu_limit":      {"C", "A"},
-		"memory":         {"D"},
-		"memory_request": {"E", "D"},
-		"memory_limit":   {"F", "D"},
-		"restarts":       {"G", "A"},
-		"desired_pods":   {"H"},
-		"available_pods": {"I"},
-	}
-
-	builderQueriesForDeployments = map[string]*v3.BuilderQuery{
-		// desired pods
-		"H": {
-			QueryName:  "H",
-			DataSource: v3.DataSourceMetrics,
-			AggregateAttribute: v3.AttributeKey{
-				Key:      metricNamesForDeployments["desired_pods"],
-				DataType: v3.AttributeKeyDataTypeFloat64,
-			},
-			Temporality: v3.Unspecified,
-			Filters: &v3.FilterSet{
-				Operator: "AND",
-				Items:    []v3.FilterItem{},
-			},
-			GroupBy:          []v3.AttributeKey{},
-			Expression:       "H",
-			ReduceTo:         v3.ReduceToOperatorLast,
-			TimeAggregation:  v3.TimeAggregationAnyLast,
-			SpaceAggregation: v3.SpaceAggregationSum,
-			Disabled:         false,
-		},
-		// available pods
-		"I": {
-			QueryName:  "I",
-			DataSource: v3.DataSourceMetrics,
-			AggregateAttribute: v3.AttributeKey{
-				Key:      metricNamesForDeployments["available_pods"],
-				DataType: v3.AttributeKeyDataTypeFloat64,
-			},
-			Temporality: v3.Unspecified,
-			Filters: &v3.FilterSet{
-				Operator: "AND",
-				Items:    []v3.FilterItem{},
-			},
-			GroupBy:          []v3.AttributeKey{},
-			Expression:       "I",
-			ReduceTo:         v3.ReduceToOperatorLast,
-			TimeAggregation:  v3.TimeAggregationAnyLast,
-			SpaceAggregation: v3.SpaceAggregationSum,
-			Disabled:         false,
-		},
-	}
-
-	deploymentQueryNames = []string{"A", "B", "C", "D", "E", "F", "G", "H", "I"}
-)
-
-type DeploymentsRepo struct {
-	reader    interfaces.Reader
-	querierV2 interfaces.Querier
-}
-
-func NewDeploymentsRepo(reader interfaces.Reader, querierV2 interfaces.Querier) *DeploymentsRepo {
-	return &DeploymentsRepo{reader: reader, querierV2: querierV2}
-}
-
-func (d *DeploymentsRepo) GetDeploymentAttributeKeys(ctx context.Context, req v3.FilterAttributeKeyRequest) (*v3.FilterAttributeKeyResponse, error) {
-	// TODO(srikanthccv): remove hardcoded metric name and support keys from any pod metric
-	req.DataSource = v3.DataSourceMetrics
-	req.AggregateAttribute = metricToUseForDeployments
-	if req.Limit == 0 {
-		req.Limit = 50
-	}
-
-	attributeKeysResponse, err := d.reader.GetMetricAttributeKeys(ctx, &req)
-	if err != nil {
-		return nil, err
-	}
-
-	// TODO(srikanthccv): only return resource attributes when we have a way to
-	// distinguish between resource attributes and other attributes.
-	filteredKeys := []v3.AttributeKey{}
-	for _, key := range attributeKeysResponse.AttributeKeys {
-		if slices.Contains(pointAttrsToIgnore, key.Key) {
-			continue
-		}
-		filteredKeys = append(filteredKeys, key)
-	}
-
-	return &v3.FilterAttributeKeyResponse{AttributeKeys: filteredKeys}, nil
-}
-
-func (d *DeploymentsRepo) GetDeploymentAttributeValues(ctx context.Context, req v3.FilterAttributeValueRequest) (*v3.FilterAttributeValueResponse, error) {
-	req.DataSource = v3.DataSourceMetrics
-	req.AggregateAttribute = metricToUseForDeployments
-	if req.Limit == 0 {
-		req.Limit = 50
-	}
-
-	attributeValuesResponse, err := d.reader.GetMetricAttributeValues(ctx, &req)
-	if err != nil {
-		return nil, err
-	}
-
-	return attributeValuesResponse, nil
-}
-
-func (d *DeploymentsRepo) getMetadataAttributes(ctx context.Context, req model.DeploymentListRequest) (map[string]map[string]string, error) {
-	deploymentAttrs := map[string]map[string]string{}
-
-	for _, key := range deploymentAttrsToEnrich {
-		hasKey := false
-		for _, groupByKey := range req.GroupBy {
-			if groupByKey.Key == key {
-				hasKey = true
-				break
-			}
-		}
-		if !hasKey {
-			req.GroupBy = append(req.GroupBy, v3.AttributeKey{Key: key})
-		}
-	}
-
-	mq := v3.BuilderQuery{
-		DataSource: v3.DataSourceMetrics,
-		AggregateAttribute: v3.AttributeKey{
-			Key:      metricToUseForDeployments,
-			DataType: v3.AttributeKeyDataTypeFloat64,
-		},
-		Temporality: v3.Unspecified,
-		GroupBy:     req.GroupBy,
-	}
-
-	query, err := helpers.PrepareTimeseriesFilterQuery(req.Start, req.End, &mq)
-	if err != nil {
-		return nil, err
-	}
-
-	query = localQueryToDistributedQuery(query)
-
-	attrsListResponse, err := d.reader.GetListResultV3(ctx, query)
-	if err != nil {
-		return nil, err
-	}
-
-	for _, row := range attrsListResponse {
-		stringData := map[string]string{}
-		for key, value := range row.Data {
-			if str, ok := value.(string); ok {
-				stringData[key] = str
-			} else if strPtr, ok := value.(*string); ok {
-				stringData[key] = *strPtr
-			}
-		}
-
-		deploymentName := stringData[k8sDeploymentNameAttrKey]
-		if _, ok := deploymentAttrs[deploymentName]; !ok {
-			deploymentAttrs[deploymentName] = map[string]string{}
-		}
-
-		for _, key := range req.GroupBy {
-			deploymentAttrs[deploymentName][key.Key] = stringData[key.Key]
-		}
-	}
-
-	return deploymentAttrs, nil
-}
-
-func (d *DeploymentsRepo) getTopDeploymentGroups(ctx context.Context, req model.DeploymentListRequest, q *v3.QueryRangeParamsV3) ([]map[string]string, []map[string]string, error) {
-	step, timeSeriesTableName, samplesTableName := getParamsForTopDeployments(req)
-
-	queryNames := queryNamesForDeployments[req.OrderBy.ColumnName]
-	topDeploymentGroupsQueryRangeParams := &v3.QueryRangeParamsV3{
-		Start: req.Start,
-		End:   req.End,
-		Step:  step,
-		CompositeQuery: &v3.CompositeQuery{
-			BuilderQueries: map[string]*v3.BuilderQuery{},
-			QueryType:      v3.QueryTypeBuilder,
-			PanelType:      v3.PanelTypeTable,
-		},
-	}
-
-	for _, queryName := range queryNames {
-		query := q.CompositeQuery.BuilderQueries[queryName].Clone()
-		query.StepInterval = step
-		query.MetricTableHints = &v3.MetricTableHints{
-			TimeSeriesTableName: timeSeriesTableName,
-			SamplesTableName:    samplesTableName,
-		}
-		if req.Filters != nil && len(req.Filters.Items) > 0 {
-			if query.Filters == nil {
-				query.Filters = &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{}}
-			}
-			query.Filters.Items = append(query.Filters.Items, req.Filters.Items...)
-		}
-		topDeploymentGroupsQueryRangeParams.CompositeQuery.BuilderQueries[queryName] = query
-	}
-
-	queryResponse, _, err := d.querierV2.QueryRange(ctx, topDeploymentGroupsQueryRangeParams)
-	if err != nil {
-		return nil, nil, err
-	}
-	formattedResponse, err := postprocess.PostProcessResult(queryResponse, topDeploymentGroupsQueryRangeParams)
-	if err != nil {
-		return nil, nil, err
-	}
-
-	if len(formattedResponse) == 0 || len(formattedResponse[0].Series) == 0 {
-		return nil, nil, nil
-	}
-
-	if req.OrderBy.Order == v3.DirectionDesc {
-		sort.Slice(formattedResponse[0].Series, func(i, j int) bool {
-			return formattedResponse[0].Series[i].Points[0].Value > formattedResponse[0].Series[j].Points[0].Value
-		})
-	} else {
-		sort.Slice(formattedResponse[0].Series, func(i, j int) bool {
-			return formattedResponse[0].Series[i].Points[0].Value < formattedResponse[0].Series[j].Points[0].Value
-		})
-	}
-
-	limit := math.Min(float64(req.Offset+req.Limit), float64(len(formattedResponse[0].Series)))
-
-	paginatedTopDeploymentGroupsSeries := formattedResponse[0].Series[req.Offset:int(limit)]
-
-	topDeploymentGroups := []map[string]string{}
-	for _, series := range paginatedTopDeploymentGroupsSeries {
-		topDeploymentGroups = append(topDeploymentGroups, series.Labels)
-	}
-	allDeploymentGroups := []map[string]string{}
-	for _, series := range formattedResponse[0].Series {
-		allDeploymentGroups = append(allDeploymentGroups, series.Labels)
-	}
-
-	return topDeploymentGroups, allDeploymentGroups, nil
-}
-
-func (d *DeploymentsRepo) GetDeploymentList(ctx context.Context, req model.DeploymentListRequest) (model.DeploymentListResponse, error) {
-	resp := model.DeploymentListResponse{}
-
-	if req.Limit == 0 {
-		req.Limit = 10
-	}
-
-	if req.OrderBy == nil {
-		req.OrderBy = &v3.OrderBy{ColumnName: "cpu", Order: v3.DirectionDesc}
-	}
-
-	if req.GroupBy == nil {
-		req.GroupBy = []v3.AttributeKey{{Key: k8sDeploymentNameAttrKey}}
-		resp.Type = model.ResponseTypeList
-	} else {
-		resp.Type = model.ResponseTypeGroupedList
-	}
-
-	step := int64(math.Max(float64(common.MinAllowedStepInterval(req.Start, req.End)), 60))
-
-	query := WorkloadTableListQuery.Clone()
-
-	query.Start = req.Start
-	query.End = req.End
-	query.Step = step
-
-	// add additional queries for deployments
-	for _, deploymentQuery := range builderQueriesForDeployments {
-		query.CompositeQuery.BuilderQueries[deploymentQuery.QueryName] = deploymentQuery
-	}
-
-	for _, query := range query.CompositeQuery.BuilderQueries {
-		query.StepInterval = step
-		if req.Filters != nil && len(req.Filters.Items) > 0 {
-			if query.Filters == nil {
-				query.Filters = &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{}}
-			}
-			query.Filters.Items = append(query.Filters.Items, req.Filters.Items...)
-		}
-		query.GroupBy = req.GroupBy
-		// make sure we only get records for deployments
-		query.Filters.Items = append(query.Filters.Items, v3.FilterItem{
-			Key:      v3.AttributeKey{Key: k8sDeploymentNameAttrKey},
-			Operator: v3.FilterOperatorExists,
-		})
-	}
-
-	deploymentAttrs, err := d.getMetadataAttributes(ctx, req)
-	if err != nil {
-		return resp, err
-	}
-
-	topDeploymentGroups, allDeploymentGroups, err := d.getTopDeploymentGroups(ctx, req, query)
-	if err != nil {
-		return resp, err
-	}
-
-	groupFilters := map[string][]string{}
-	for _, topDeploymentGroup := range topDeploymentGroups {
-		for k, v := range topDeploymentGroup {
-			groupFilters[k] = append(groupFilters[k], v)
-		}
-	}
-
-	for groupKey, groupValues := range groupFilters {
-		hasGroupFilter := false
-		if req.Filters != nil && len(req.Filters.Items) > 0 {
-			for _, filter := range req.Filters.Items {
-				if filter.Key.Key == groupKey {
-					hasGroupFilter = true
-					break
-				}
-			}
-		}
-
-		if !hasGroupFilter {
-			for _, query := range query.CompositeQuery.BuilderQueries {
-				query.Filters.Items = append(query.Filters.Items, v3.FilterItem{
-					Key:      v3.AttributeKey{Key: groupKey},
-					Value:    groupValues,
-					Operator: v3.FilterOperatorIn,
-				})
-			}
-		}
-	}
-
-	queryResponse, _, err := d.querierV2.QueryRange(ctx, query)
-	if err != nil {
-		return resp, err
-	}
-
-	formattedResponse, err := postprocess.PostProcessResult(queryResponse, query)
-	if err != nil {
-		return resp, err
-	}
-
-	records := []model.DeploymentListRecord{}
-
-	for _, result := range formattedResponse {
-		for _, row := range result.Table.Rows {
-
-			record := model.DeploymentListRecord{
-				DeploymentName: "",
-				CPUUsage:       -1,
-				CPURequest:     -1,
-				CPULimit:       -1,
-				MemoryUsage:    -1,
-				MemoryRequest:  -1,
-				MemoryLimit:    -1,
-				DesiredPods:    -1,
-				AvailablePods:  -1,
-			}
-
-			if deploymentName, ok := row.Data[k8sDeploymentNameAttrKey].(string); ok {
-				record.DeploymentName = deploymentName
-			}
-
-			if cpu, ok := row.Data["A"].(float64); ok {
-				record.CPUUsage = cpu
-			}
-			if cpuRequest, ok := row.Data["B"].(float64); ok {
-				record.CPURequest = cpuRequest
-			}
-
-			if cpuLimit, ok := row.Data["C"].(float64); ok {
-				record.CPULimit = cpuLimit
-			}
-
-			if memory, ok := row.Data["D"].(float64); ok {
-				record.MemoryUsage = memory
-			}
-
-			if memoryRequest, ok := row.Data["E"].(float64); ok {
-				record.MemoryRequest = memoryRequest
-			}
-
-			if memoryLimit, ok := row.Data["F"].(float64); ok {
-				record.MemoryLimit = memoryLimit
-			}
-
-			if restarts, ok := row.Data["G"].(float64); ok {
-				record.Restarts = int(restarts)
-			}
-
-			if desiredPods, ok := row.Data["H"].(float64); ok {
-				record.DesiredPods = int(desiredPods)
-			}
-
-			if availablePods, ok := row.Data["I"].(float64); ok {
-				record.AvailablePods = int(availablePods)
-			}
-
-			record.Meta = map[string]string{}
-			if _, ok := deploymentAttrs[record.DeploymentName]; ok {
-				record.Meta = deploymentAttrs[record.DeploymentName]
-			}
-
-			for k, v := range row.Data {
-				if slices.Contains(deploymentQueryNames, k) {
-					continue
-				}
-				if labelValue, ok := v.(string); ok {
-					record.Meta[k] = labelValue
-				}
-			}
-
-			records = append(records, record)
-		}
-	}
-	resp.Total = len(allDeploymentGroups)
-	resp.Records = records
-
-	return resp, nil
-}
@@ -1,498 +0,0 @@
|
|||||||
package inframetrics
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"math"
|
|
||||||
"sort"
|
|
||||||
|
|
||||||
"go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers"
|
|
||||||
"go.signoz.io/signoz/pkg/query-service/common"
|
|
||||||
"go.signoz.io/signoz/pkg/query-service/interfaces"
|
|
||||||
"go.signoz.io/signoz/pkg/query-service/model"
|
|
||||||
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
|
|
||||||
"go.signoz.io/signoz/pkg/query-service/postprocess"
|
|
||||||
"golang.org/x/exp/slices"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
metricToUseForJobs = "k8s_pod_cpu_utilization"
|
|
||||||
k8sJobNameAttrKey = "k8s_job_name"
|
|
||||||
|
|
||||||
metricNamesForJobs = map[string]string{
|
|
||||||
"desired_successful_pods": "k8s_job_desired_successful_pods",
|
|
||||||
"active_pods": "k8s_job_active_pods",
|
|
||||||
"failed_pods": "k8s_job_failed_pods",
|
|
||||||
"successful_pods": "k8s_job_successful_pods",
|
|
||||||
}
|
|
||||||
|
|
||||||
jobAttrsToEnrich = []string{
|
|
||||||
"k8s_job_name",
|
|
||||||
"k8s_namespace_name",
|
|
||||||
"k8s_cluster_name",
|
|
||||||
}
|
|
||||||
|
|
||||||
queryNamesForJobs = map[string][]string{
|
|
||||||
"cpu": {"A"},
|
|
||||||
"cpu_request": {"B", "A"},
|
|
||||||
"cpu_limit": {"C", "A"},
|
|
||||||
"memory": {"D"},
|
|
||||||
"memory_request": {"E", "D"},
|
|
||||||
"memory_limit": {"F", "D"},
|
|
||||||
"restarts": {"G", "A"},
|
|
||||||
"desired_pods": {"H"},
|
|
||||||
"active_pods": {"I"},
|
|
||||||
"failed_pods": {"J"},
|
|
||||||
"successful_pods": {"K"},
|
|
||||||
}
|
|
||||||
|
|
||||||
	builderQueriesForJobs = map[string]*v3.BuilderQuery{
		// desired successful pods
		"H": {
			QueryName:  "H",
			DataSource: v3.DataSourceMetrics,
			AggregateAttribute: v3.AttributeKey{
				Key:      metricNamesForJobs["desired_successful_pods"],
				DataType: v3.AttributeKeyDataTypeFloat64,
			},
			Temporality: v3.Unspecified,
			Filters: &v3.FilterSet{
				Operator: "AND",
				Items:    []v3.FilterItem{},
			},
			GroupBy:          []v3.AttributeKey{},
			Expression:       "H",
			ReduceTo:         v3.ReduceToOperatorLast,
			TimeAggregation:  v3.TimeAggregationAnyLast,
			SpaceAggregation: v3.SpaceAggregationSum,
			Disabled:         false,
		},
		// active pods
		"I": {
			QueryName:  "I",
			DataSource: v3.DataSourceMetrics,
			AggregateAttribute: v3.AttributeKey{
				Key:      metricNamesForJobs["active_pods"],
				DataType: v3.AttributeKeyDataTypeFloat64,
			},
			Temporality: v3.Unspecified,
			Filters: &v3.FilterSet{
				Operator: "AND",
				Items:    []v3.FilterItem{},
			},
			GroupBy:          []v3.AttributeKey{},
			Expression:       "I",
			ReduceTo:         v3.ReduceToOperatorLast,
			TimeAggregation:  v3.TimeAggregationAnyLast,
			SpaceAggregation: v3.SpaceAggregationSum,
			Disabled:         false,
		},
		// failed pods
		"J": {
			QueryName:  "J",
			DataSource: v3.DataSourceMetrics,
			AggregateAttribute: v3.AttributeKey{
				Key:      metricNamesForJobs["failed_pods"],
				DataType: v3.AttributeKeyDataTypeFloat64,
			},
			Temporality: v3.Unspecified,
			Filters: &v3.FilterSet{
				Operator: "AND",
				Items:    []v3.FilterItem{},
			},
			GroupBy:          []v3.AttributeKey{},
			Expression:       "J",
			ReduceTo:         v3.ReduceToOperatorLast,
			TimeAggregation:  v3.TimeAggregationAnyLast,
			SpaceAggregation: v3.SpaceAggregationSum,
			Disabled:         false,
		},
		// successful pods
		"K": {
			QueryName:  "K",
			DataSource: v3.DataSourceMetrics,
			AggregateAttribute: v3.AttributeKey{
				Key:      metricNamesForJobs["successful_pods"],
				DataType: v3.AttributeKeyDataTypeFloat64,
			},
			Temporality: v3.Unspecified,
			Filters: &v3.FilterSet{
				Operator: "AND",
				Items:    []v3.FilterItem{},
			},
			GroupBy:          []v3.AttributeKey{},
			Expression:       "K",
			ReduceTo:         v3.ReduceToOperatorLast,
			TimeAggregation:  v3.TimeAggregationAnyLast,
			SpaceAggregation: v3.SpaceAggregationSum,
			Disabled:         false,
		},
	}

	jobQueryNames = []string{"A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K"}
)

type JobsRepo struct {
	reader    interfaces.Reader
	querierV2 interfaces.Querier
}

func NewJobsRepo(reader interfaces.Reader, querierV2 interfaces.Querier) *JobsRepo {
	return &JobsRepo{reader: reader, querierV2: querierV2}
}

func (d *JobsRepo) GetJobAttributeKeys(ctx context.Context, req v3.FilterAttributeKeyRequest) (*v3.FilterAttributeKeyResponse, error) {
	// TODO(srikanthccv): remove hardcoded metric name and support keys from any pod metric
	req.DataSource = v3.DataSourceMetrics
	req.AggregateAttribute = metricToUseForJobs
	if req.Limit == 0 {
		req.Limit = 50
	}

	attributeKeysResponse, err := d.reader.GetMetricAttributeKeys(ctx, &req)
	if err != nil {
		return nil, err
	}

	// TODO(srikanthccv): only return resource attributes when we have a way to
	// distinguish between resource attributes and other attributes.
	filteredKeys := []v3.AttributeKey{}
	for _, key := range attributeKeysResponse.AttributeKeys {
		if slices.Contains(pointAttrsToIgnore, key.Key) {
			continue
		}
		filteredKeys = append(filteredKeys, key)
	}

	return &v3.FilterAttributeKeyResponse{AttributeKeys: filteredKeys}, nil
}

func (d *JobsRepo) GetJobAttributeValues(ctx context.Context, req v3.FilterAttributeValueRequest) (*v3.FilterAttributeValueResponse, error) {
	req.DataSource = v3.DataSourceMetrics
	req.AggregateAttribute = metricToUseForJobs
	if req.Limit == 0 {
		req.Limit = 50
	}

	attributeValuesResponse, err := d.reader.GetMetricAttributeValues(ctx, &req)
	if err != nil {
		return nil, err
	}

	return attributeValuesResponse, nil
}
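
// getMetadataAttributes fetches the enrichment attributes for each job
// (namespace and cluster names plus any requested group-by keys) in a single
// list query, keyed by job name, so records can be annotated without a
// per-record lookup.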
func (d *JobsRepo) getMetadataAttributes(ctx context.Context, req model.JobListRequest) (map[string]map[string]string, error) {
	jobAttrs := map[string]map[string]string{}

	for _, key := range jobAttrsToEnrich {
		hasKey := false
		for _, groupByKey := range req.GroupBy {
			if groupByKey.Key == key {
				hasKey = true
				break
			}
		}
		if !hasKey {
			req.GroupBy = append(req.GroupBy, v3.AttributeKey{Key: key})
		}
	}

	mq := v3.BuilderQuery{
		DataSource: v3.DataSourceMetrics,
		AggregateAttribute: v3.AttributeKey{
			Key:      metricToUseForJobs,
			DataType: v3.AttributeKeyDataTypeFloat64,
		},
		Temporality: v3.Unspecified,
		GroupBy:     req.GroupBy,
	}

	query, err := helpers.PrepareTimeseriesFilterQuery(req.Start, req.End, &mq)
	if err != nil {
		return nil, err
	}

	query = localQueryToDistributedQuery(query)

	attrsListResponse, err := d.reader.GetListResultV3(ctx, query)
	if err != nil {
		return nil, err
	}

	for _, row := range attrsListResponse {
		stringData := map[string]string{}
		for key, value := range row.Data {
			if str, ok := value.(string); ok {
				stringData[key] = str
			} else if strPtr, ok := value.(*string); ok {
				stringData[key] = *strPtr
			}
		}

		jobName := stringData[k8sJobNameAttrKey]
		if _, ok := jobAttrs[jobName]; !ok {
			jobAttrs[jobName] = map[string]string{}
		}

		for _, key := range req.GroupBy {
			jobAttrs[jobName][key.Key] = stringData[key.Key]
		}
	}

	return jobAttrs, nil
}
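
// getTopJobGroups runs only the builder queries behind the requested order-by
// column (against the table names picked by getParamsForTopJobs), sorts the
// resulting series by their first point, and returns the paginated top groups
// together with all groups so the caller can report an accurate total.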
func (d *JobsRepo) getTopJobGroups(ctx context.Context, req model.JobListRequest, q *v3.QueryRangeParamsV3) ([]map[string]string, []map[string]string, error) {
	step, timeSeriesTableName, samplesTableName := getParamsForTopJobs(req)

	queryNames := queryNamesForJobs[req.OrderBy.ColumnName]
	topJobGroupsQueryRangeParams := &v3.QueryRangeParamsV3{
		Start: req.Start,
		End:   req.End,
		Step:  step,
		CompositeQuery: &v3.CompositeQuery{
			BuilderQueries: map[string]*v3.BuilderQuery{},
			QueryType:      v3.QueryTypeBuilder,
			PanelType:      v3.PanelTypeTable,
		},
	}

	for _, queryName := range queryNames {
		query := q.CompositeQuery.BuilderQueries[queryName].Clone()
		query.StepInterval = step
		query.MetricTableHints = &v3.MetricTableHints{
			TimeSeriesTableName: timeSeriesTableName,
			SamplesTableName:    samplesTableName,
		}
		if req.Filters != nil && len(req.Filters.Items) > 0 {
			if query.Filters == nil {
				query.Filters = &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{}}
			}
			query.Filters.Items = append(query.Filters.Items, req.Filters.Items...)
		}
		topJobGroupsQueryRangeParams.CompositeQuery.BuilderQueries[queryName] = query
	}

	queryResponse, _, err := d.querierV2.QueryRange(ctx, topJobGroupsQueryRangeParams)
	if err != nil {
		return nil, nil, err
	}
	formattedResponse, err := postprocess.PostProcessResult(queryResponse, topJobGroupsQueryRangeParams)
	if err != nil {
		return nil, nil, err
	}

	if len(formattedResponse) == 0 || len(formattedResponse[0].Series) == 0 {
		return nil, nil, nil
	}

	if req.OrderBy.Order == v3.DirectionDesc {
		sort.Slice(formattedResponse[0].Series, func(i, j int) bool {
			return formattedResponse[0].Series[i].Points[0].Value > formattedResponse[0].Series[j].Points[0].Value
		})
	} else {
		sort.Slice(formattedResponse[0].Series, func(i, j int) bool {
			return formattedResponse[0].Series[i].Points[0].Value < formattedResponse[0].Series[j].Points[0].Value
		})
	}

	limit := math.Min(float64(req.Offset+req.Limit), float64(len(formattedResponse[0].Series)))

	paginatedTopJobGroupsSeries := formattedResponse[0].Series[req.Offset:int(limit)]

	topJobGroups := []map[string]string{}
	for _, series := range paginatedTopJobGroupsSeries {
		topJobGroups = append(topJobGroups, series.Labels)
	}
	allJobGroups := []map[string]string{}
	for _, series := range formattedResponse[0].Series {
		allJobGroups = append(allJobGroups, series.Labels)
	}

	return topJobGroups, allJobGroups, nil
}
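
// GetJobList builds the job overview table: it clones the shared workload
// queries, adds the job-specific ones, limits the final query to the top job
// groups for the requested page, and assembles one record per table row.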
func (d *JobsRepo) GetJobList(ctx context.Context, req model.JobListRequest) (model.JobListResponse, error) {
	resp := model.JobListResponse{}

	if req.Limit == 0 {
		req.Limit = 10
	}

	if req.OrderBy == nil {
		req.OrderBy = &v3.OrderBy{ColumnName: "desired_pods", Order: v3.DirectionDesc}
	}

	if req.GroupBy == nil {
		req.GroupBy = []v3.AttributeKey{{Key: k8sJobNameAttrKey}}
		resp.Type = model.ResponseTypeList
	} else {
		resp.Type = model.ResponseTypeGroupedList
	}

	step := int64(math.Max(float64(common.MinAllowedStepInterval(req.Start, req.End)), 60))

	query := WorkloadTableListQuery.Clone()

	query.Start = req.Start
	query.End = req.End
	query.Step = step

	// add additional queries for jobs
	for _, jobQuery := range builderQueriesForJobs {
		query.CompositeQuery.BuilderQueries[jobQuery.QueryName] = jobQuery
	}

	for _, query := range query.CompositeQuery.BuilderQueries {
		query.StepInterval = step
		if req.Filters != nil && len(req.Filters.Items) > 0 {
			if query.Filters == nil {
				query.Filters = &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{}}
			}
			query.Filters.Items = append(query.Filters.Items, req.Filters.Items...)
		}
		query.GroupBy = req.GroupBy
		// make sure we only get records for jobs
		query.Filters.Items = append(query.Filters.Items, v3.FilterItem{
			Key:      v3.AttributeKey{Key: k8sJobNameAttrKey},
			Operator: v3.FilterOperatorExists,
		})
	}

	jobAttrs, err := d.getMetadataAttributes(ctx, req)
	if err != nil {
		return resp, err
	}

	topJobGroups, allJobGroups, err := d.getTopJobGroups(ctx, req, query)
	if err != nil {
		return resp, err
	}

	groupFilters := map[string][]string{}
	for _, topJobGroup := range topJobGroups {
		for k, v := range topJobGroup {
			groupFilters[k] = append(groupFilters[k], v)
		}
	}

	for groupKey, groupValues := range groupFilters {
		hasGroupFilter := false
		if req.Filters != nil && len(req.Filters.Items) > 0 {
			for _, filter := range req.Filters.Items {
				if filter.Key.Key == groupKey {
					hasGroupFilter = true
					break
				}
			}
		}

		if !hasGroupFilter {
			for _, query := range query.CompositeQuery.BuilderQueries {
				query.Filters.Items = append(query.Filters.Items, v3.FilterItem{
					Key:      v3.AttributeKey{Key: groupKey},
					Value:    groupValues,
					Operator: v3.FilterOperatorIn,
				})
			}
		}
	}

	queryResponse, _, err := d.querierV2.QueryRange(ctx, query)
	if err != nil {
		return resp, err
	}

	formattedResponse, err := postprocess.PostProcessResult(queryResponse, query)
	if err != nil {
		return resp, err
	}

	records := []model.JobListRecord{}

	for _, result := range formattedResponse {
		for _, row := range result.Table.Rows {

			record := model.JobListRecord{
				JobName:               "",
				CPUUsage:              -1,
				CPURequest:            -1,
				CPULimit:              -1,
				MemoryUsage:           -1,
				MemoryRequest:         -1,
				MemoryLimit:           -1,
				DesiredSuccessfulPods: -1,
				ActivePods:            -1,
				FailedPods:            -1,
				SuccessfulPods:        -1,
			}

			if jobName, ok := row.Data[k8sJobNameAttrKey].(string); ok {
				record.JobName = jobName
			}

			if cpu, ok := row.Data["A"].(float64); ok {
				record.CPUUsage = cpu
			}
			if cpuRequest, ok := row.Data["B"].(float64); ok {
				record.CPURequest = cpuRequest
			}

			if cpuLimit, ok := row.Data["C"].(float64); ok {
				record.CPULimit = cpuLimit
			}

			if memory, ok := row.Data["D"].(float64); ok {
				record.MemoryUsage = memory
			}

			if memoryRequest, ok := row.Data["E"].(float64); ok {
				record.MemoryRequest = memoryRequest
			}

			if memoryLimit, ok := row.Data["F"].(float64); ok {
				record.MemoryLimit = memoryLimit
			}

			if restarts, ok := row.Data["G"].(float64); ok {
				record.Restarts = int(restarts)
			}

			if desiredSuccessfulPods, ok := row.Data["H"].(float64); ok {
				record.DesiredSuccessfulPods = int(desiredSuccessfulPods)
			}

			if activePods, ok := row.Data["I"].(float64); ok {
				record.ActivePods = int(activePods)
			}

			if failedPods, ok := row.Data["J"].(float64); ok {
				record.FailedPods = int(failedPods)
			}

			if successfulPods, ok := row.Data["K"].(float64); ok {
				record.SuccessfulPods = int(successfulPods)
			}

			record.Meta = map[string]string{}
			if _, ok := jobAttrs[record.JobName]; ok {
				record.Meta = jobAttrs[record.JobName]
			}

			for k, v := range row.Data {
				if slices.Contains(jobQueryNames, k) {
					continue
				}
				if labelValue, ok := v.(string); ok {
					record.Meta[k] = labelValue
				}
			}

			records = append(records, record)
		}
	}
	resp.Total = len(allJobGroups)
	resp.Records = records

	return resp, nil
}

@@ -1,444 +0,0 @@
package inframetrics

import (
	"context"
	"math"
	"sort"

	"go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers"
	"go.signoz.io/signoz/pkg/query-service/common"
	"go.signoz.io/signoz/pkg/query-service/interfaces"
	"go.signoz.io/signoz/pkg/query-service/model"
	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
	"go.signoz.io/signoz/pkg/query-service/postprocess"
	"golang.org/x/exp/slices"
)

var (
	metricToUseForStatefulSets = "k8s_pod_cpu_utilization"
	k8sStatefulSetNameAttrKey  = "k8s_statefulset_name"

	metricNamesForStatefulSets = map[string]string{
		"desired_pods":   "k8s_statefulset_desired_pods",
		"available_pods": "k8s_statefulset_current_pods",
	}

	statefulSetAttrsToEnrich = []string{
		"k8s_statefulset_name",
		"k8s_namespace_name",
		"k8s_cluster_name",
	}

	queryNamesForStatefulSets = map[string][]string{
		"cpu":            {"A"},
		"cpu_request":    {"B", "A"},
		"cpu_limit":      {"C", "A"},
		"memory":         {"D"},
		"memory_request": {"E", "D"},
		"memory_limit":   {"F", "D"},
		"restarts":       {"G", "A"},
		"desired_pods":   {"H"},
		"available_pods": {"I"},
	}

	builderQueriesForStatefulSets = map[string]*v3.BuilderQuery{
		// desired pods
		"H": {
			QueryName:  "H",
			DataSource: v3.DataSourceMetrics,
			AggregateAttribute: v3.AttributeKey{
				Key:      metricNamesForStatefulSets["desired_pods"],
				DataType: v3.AttributeKeyDataTypeFloat64,
			},
			Temporality: v3.Unspecified,
			Filters: &v3.FilterSet{
				Operator: "AND",
				Items:    []v3.FilterItem{},
			},
			GroupBy:          []v3.AttributeKey{},
			Expression:       "H",
			ReduceTo:         v3.ReduceToOperatorLast,
			TimeAggregation:  v3.TimeAggregationAnyLast,
			SpaceAggregation: v3.SpaceAggregationSum,
			Disabled:         false,
		},
		// available pods
		"I": {
			QueryName:  "I",
			DataSource: v3.DataSourceMetrics,
			AggregateAttribute: v3.AttributeKey{
				Key:      metricNamesForStatefulSets["available_pods"],
				DataType: v3.AttributeKeyDataTypeFloat64,
			},
			Temporality: v3.Unspecified,
			Filters: &v3.FilterSet{
				Operator: "AND",
				Items:    []v3.FilterItem{},
			},
			GroupBy:          []v3.AttributeKey{},
			Expression:       "I",
			ReduceTo:         v3.ReduceToOperatorLast,
			TimeAggregation:  v3.TimeAggregationAnyLast,
			SpaceAggregation: v3.SpaceAggregationSum,
			Disabled:         false,
		},
	}

	statefulSetQueryNames = []string{"A", "B", "C", "D", "E", "F", "G", "H", "I"}
)

type StatefulSetsRepo struct {
	reader    interfaces.Reader
	querierV2 interfaces.Querier
}

func NewStatefulSetsRepo(reader interfaces.Reader, querierV2 interfaces.Querier) *StatefulSetsRepo {
	return &StatefulSetsRepo{reader: reader, querierV2: querierV2}
}

func (d *StatefulSetsRepo) GetStatefulSetAttributeKeys(ctx context.Context, req v3.FilterAttributeKeyRequest) (*v3.FilterAttributeKeyResponse, error) {
	// TODO(srikanthccv): remove hardcoded metric name and support keys from any pod metric
	req.DataSource = v3.DataSourceMetrics
	req.AggregateAttribute = metricToUseForStatefulSets
	if req.Limit == 0 {
		req.Limit = 50
	}

	attributeKeysResponse, err := d.reader.GetMetricAttributeKeys(ctx, &req)
	if err != nil {
		return nil, err
	}

	// TODO(srikanthccv): only return resource attributes when we have a way to
	// distinguish between resource attributes and other attributes.
	filteredKeys := []v3.AttributeKey{}
	for _, key := range attributeKeysResponse.AttributeKeys {
		if slices.Contains(pointAttrsToIgnore, key.Key) {
			continue
		}
		filteredKeys = append(filteredKeys, key)
	}

	return &v3.FilterAttributeKeyResponse{AttributeKeys: filteredKeys}, nil
}

func (d *StatefulSetsRepo) GetStatefulSetAttributeValues(ctx context.Context, req v3.FilterAttributeValueRequest) (*v3.FilterAttributeValueResponse, error) {
	req.DataSource = v3.DataSourceMetrics
	req.AggregateAttribute = metricToUseForStatefulSets
	if req.Limit == 0 {
		req.Limit = 50
	}

	attributeValuesResponse, err := d.reader.GetMetricAttributeValues(ctx, &req)
	if err != nil {
		return nil, err
	}

	return attributeValuesResponse, nil
}

func (d *StatefulSetsRepo) getMetadataAttributes(ctx context.Context, req model.StatefulSetListRequest) (map[string]map[string]string, error) {
	statefulSetAttrs := map[string]map[string]string{}

	for _, key := range statefulSetAttrsToEnrich {
		hasKey := false
		for _, groupByKey := range req.GroupBy {
			if groupByKey.Key == key {
				hasKey = true
				break
			}
		}
		if !hasKey {
			req.GroupBy = append(req.GroupBy, v3.AttributeKey{Key: key})
		}
	}

	mq := v3.BuilderQuery{
		DataSource: v3.DataSourceMetrics,
		AggregateAttribute: v3.AttributeKey{
			Key:      metricToUseForStatefulSets,
			DataType: v3.AttributeKeyDataTypeFloat64,
		},
		Temporality: v3.Unspecified,
		GroupBy:     req.GroupBy,
	}

	query, err := helpers.PrepareTimeseriesFilterQuery(req.Start, req.End, &mq)
	if err != nil {
		return nil, err
	}

	query = localQueryToDistributedQuery(query)

	attrsListResponse, err := d.reader.GetListResultV3(ctx, query)
	if err != nil {
		return nil, err
	}

	for _, row := range attrsListResponse {
		stringData := map[string]string{}
		for key, value := range row.Data {
			if str, ok := value.(string); ok {
				stringData[key] = str
			} else if strPtr, ok := value.(*string); ok {
				stringData[key] = *strPtr
			}
		}

		statefulSetName := stringData[k8sStatefulSetNameAttrKey]
		if _, ok := statefulSetAttrs[statefulSetName]; !ok {
			statefulSetAttrs[statefulSetName] = map[string]string{}
		}

		for _, key := range req.GroupBy {
			statefulSetAttrs[statefulSetName][key.Key] = stringData[key.Key]
		}
	}

	return statefulSetAttrs, nil
}

func (d *StatefulSetsRepo) getTopStatefulSetGroups(ctx context.Context, req model.StatefulSetListRequest, q *v3.QueryRangeParamsV3) ([]map[string]string, []map[string]string, error) {
	step, timeSeriesTableName, samplesTableName := getParamsForTopStatefulSets(req)

	queryNames := queryNamesForStatefulSets[req.OrderBy.ColumnName]
	topStatefulSetGroupsQueryRangeParams := &v3.QueryRangeParamsV3{
		Start: req.Start,
		End:   req.End,
		Step:  step,
		CompositeQuery: &v3.CompositeQuery{
			BuilderQueries: map[string]*v3.BuilderQuery{},
			QueryType:      v3.QueryTypeBuilder,
			PanelType:      v3.PanelTypeTable,
		},
	}

	for _, queryName := range queryNames {
		query := q.CompositeQuery.BuilderQueries[queryName].Clone()
		query.StepInterval = step
		query.MetricTableHints = &v3.MetricTableHints{
			TimeSeriesTableName: timeSeriesTableName,
			SamplesTableName:    samplesTableName,
		}
		if req.Filters != nil && len(req.Filters.Items) > 0 {
			if query.Filters == nil {
				query.Filters = &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{}}
			}
			query.Filters.Items = append(query.Filters.Items, req.Filters.Items...)
		}
		topStatefulSetGroupsQueryRangeParams.CompositeQuery.BuilderQueries[queryName] = query
	}

	queryResponse, _, err := d.querierV2.QueryRange(ctx, topStatefulSetGroupsQueryRangeParams)
	if err != nil {
		return nil, nil, err
	}
	formattedResponse, err := postprocess.PostProcessResult(queryResponse, topStatefulSetGroupsQueryRangeParams)
	if err != nil {
		return nil, nil, err
	}

	if len(formattedResponse) == 0 || len(formattedResponse[0].Series) == 0 {
		return nil, nil, nil
	}

	if req.OrderBy.Order == v3.DirectionDesc {
		sort.Slice(formattedResponse[0].Series, func(i, j int) bool {
			return formattedResponse[0].Series[i].Points[0].Value > formattedResponse[0].Series[j].Points[0].Value
		})
	} else {
		sort.Slice(formattedResponse[0].Series, func(i, j int) bool {
			return formattedResponse[0].Series[i].Points[0].Value < formattedResponse[0].Series[j].Points[0].Value
		})
	}

	limit := math.Min(float64(req.Offset+req.Limit), float64(len(formattedResponse[0].Series)))

	paginatedTopStatefulSetGroupsSeries := formattedResponse[0].Series[req.Offset:int(limit)]

	topStatefulSetGroups := []map[string]string{}
	for _, series := range paginatedTopStatefulSetGroupsSeries {
		topStatefulSetGroups = append(topStatefulSetGroups, series.Labels)
	}
	allStatefulSetGroups := []map[string]string{}
	for _, series := range formattedResponse[0].Series {
		allStatefulSetGroups = append(allStatefulSetGroups, series.Labels)
	}

	return topStatefulSetGroups, allStatefulSetGroups, nil
}

func (d *StatefulSetsRepo) GetStatefulSetList(ctx context.Context, req model.StatefulSetListRequest) (model.StatefulSetListResponse, error) {
	resp := model.StatefulSetListResponse{}

	if req.Limit == 0 {
		req.Limit = 10
	}

	if req.OrderBy == nil {
		req.OrderBy = &v3.OrderBy{ColumnName: "cpu", Order: v3.DirectionDesc}
	}

	if req.GroupBy == nil {
		req.GroupBy = []v3.AttributeKey{{Key: k8sStatefulSetNameAttrKey}}
		resp.Type = model.ResponseTypeList
	} else {
		resp.Type = model.ResponseTypeGroupedList
	}

	step := int64(math.Max(float64(common.MinAllowedStepInterval(req.Start, req.End)), 60))

	query := WorkloadTableListQuery.Clone()

	query.Start = req.Start
	query.End = req.End
	query.Step = step

	// add additional queries for stateful sets
	for _, statefulSetQuery := range builderQueriesForStatefulSets {
		query.CompositeQuery.BuilderQueries[statefulSetQuery.QueryName] = statefulSetQuery
	}

	for _, query := range query.CompositeQuery.BuilderQueries {
		query.StepInterval = step
		if req.Filters != nil && len(req.Filters.Items) > 0 {
			if query.Filters == nil {
				query.Filters = &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{}}
			}
			query.Filters.Items = append(query.Filters.Items, req.Filters.Items...)
		}
		query.GroupBy = req.GroupBy
		// make sure we only get records for stateful sets
		query.Filters.Items = append(query.Filters.Items, v3.FilterItem{
			Key:      v3.AttributeKey{Key: k8sStatefulSetNameAttrKey},
			Operator: v3.FilterOperatorExists,
		})
	}

	statefulSetAttrs, err := d.getMetadataAttributes(ctx, req)
	if err != nil {
		return resp, err
	}

	topStatefulSetGroups, allStatefulSetGroups, err := d.getTopStatefulSetGroups(ctx, req, query)
	if err != nil {
		return resp, err
	}

	groupFilters := map[string][]string{}
	for _, topStatefulSetGroup := range topStatefulSetGroups {
		for k, v := range topStatefulSetGroup {
			groupFilters[k] = append(groupFilters[k], v)
		}
	}

	for groupKey, groupValues := range groupFilters {
		hasGroupFilter := false
		if req.Filters != nil && len(req.Filters.Items) > 0 {
			for _, filter := range req.Filters.Items {
				if filter.Key.Key == groupKey {
					hasGroupFilter = true
					break
				}
			}
		}

		if !hasGroupFilter {
			for _, query := range query.CompositeQuery.BuilderQueries {
				query.Filters.Items = append(query.Filters.Items, v3.FilterItem{
					Key:      v3.AttributeKey{Key: groupKey},
					Value:    groupValues,
					Operator: v3.FilterOperatorIn,
				})
			}
		}
	}

	queryResponse, _, err := d.querierV2.QueryRange(ctx, query)
	if err != nil {
		return resp, err
	}

	formattedResponse, err := postprocess.PostProcessResult(queryResponse, query)
	if err != nil {
		return resp, err
	}

	records := []model.StatefulSetListRecord{}

	for _, result := range formattedResponse {
		for _, row := range result.Table.Rows {

			record := model.StatefulSetListRecord{
				StatefulSetName: "",
				CPUUsage:        -1,
				CPURequest:      -1,
				CPULimit:        -1,
				MemoryUsage:     -1,
				MemoryRequest:   -1,
				MemoryLimit:     -1,
				DesiredPods:     -1,
				AvailablePods:   -1,
			}

			if statefulSetName, ok := row.Data[k8sStatefulSetNameAttrKey].(string); ok {
				record.StatefulSetName = statefulSetName
			}

			if cpu, ok := row.Data["A"].(float64); ok {
				record.CPUUsage = cpu
			}
			if cpuRequest, ok := row.Data["B"].(float64); ok {
				record.CPURequest = cpuRequest
			}

			if cpuLimit, ok := row.Data["C"].(float64); ok {
				record.CPULimit = cpuLimit
			}

			if memory, ok := row.Data["D"].(float64); ok {
				record.MemoryUsage = memory
			}

			if memoryRequest, ok := row.Data["E"].(float64); ok {
				record.MemoryRequest = memoryRequest
			}

			if memoryLimit, ok := row.Data["F"].(float64); ok {
				record.MemoryLimit = memoryLimit
			}

			if restarts, ok := row.Data["G"].(float64); ok {
				record.Restarts = int(restarts)
			}

			if desiredPods, ok := row.Data["H"].(float64); ok {
				record.DesiredPods = int(desiredPods)
			}

			if availablePods, ok := row.Data["I"].(float64); ok {
				record.AvailablePods = int(availablePods)
			}

			record.Meta = map[string]string{}
			if _, ok := statefulSetAttrs[record.StatefulSetName]; ok {
				record.Meta = statefulSetAttrs[record.StatefulSetName]
			}

			for k, v := range row.Data {
				if slices.Contains(statefulSetQueryNames, k) {
					continue
				}
				if labelValue, ok := v.(string); ok {
					record.Meta[k] = labelValue
				}
			}

			records = append(records, record)
		}
	}
	resp.Total = len(allStatefulSetGroups)
	resp.Records = records

	return resp, nil
}

@@ -1,166 +0,0 @@
package inframetrics

import v3 "go.signoz.io/signoz/pkg/query-service/model/v3"

var (
	metricNamesForWorkloads = map[string]string{
		"cpu":            "k8s_pod_cpu_utilization",
		"cpu_request":    "k8s_pod_cpu_request_utilization",
		"cpu_limit":      "k8s_pod_cpu_limit_utilization",
		"memory":         "k8s_pod_memory_usage",
		"memory_request": "k8s_pod_memory_request_utilization",
		"memory_limit":   "k8s_pod_memory_limit_utilization",
		"restarts":       "k8s_container_restarts",
	}
)
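
// WorkloadTableListQuery defines the shared pod-level builder queries A-G
// (cpu/memory usage, request and limit utilization, restarts) that the
// workload repos clone and extend with their own queries.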
var WorkloadTableListQuery = v3.QueryRangeParamsV3{
	CompositeQuery: &v3.CompositeQuery{
		BuilderQueries: map[string]*v3.BuilderQuery{
			// pod cpu utilization
			"A": {
				QueryName:  "A",
				DataSource: v3.DataSourceMetrics,
				AggregateAttribute: v3.AttributeKey{
					Key:      metricNamesForWorkloads["cpu"],
					DataType: v3.AttributeKeyDataTypeFloat64,
				},
				Temporality: v3.Unspecified,
				Filters: &v3.FilterSet{
					Operator: "AND",
					Items:    []v3.FilterItem{},
				},
				GroupBy:          []v3.AttributeKey{},
				Expression:       "A",
				ReduceTo:         v3.ReduceToOperatorAvg,
				TimeAggregation:  v3.TimeAggregationAvg,
				SpaceAggregation: v3.SpaceAggregationSum,
				Disabled:         false,
			},
			// pod cpu request utilization
			"B": {
				QueryName:  "B",
				DataSource: v3.DataSourceMetrics,
				AggregateAttribute: v3.AttributeKey{
					Key:      metricNamesForWorkloads["cpu_request"],
					DataType: v3.AttributeKeyDataTypeFloat64,
				},
				Temporality: v3.Unspecified,
				Filters: &v3.FilterSet{
					Operator: "AND",
					Items:    []v3.FilterItem{},
				},
				GroupBy:          []v3.AttributeKey{},
				Expression:       "B",
				ReduceTo:         v3.ReduceToOperatorAvg,
				TimeAggregation:  v3.TimeAggregationAvg,
				SpaceAggregation: v3.SpaceAggregationSum,
				Disabled:         false,
			},
			// pod cpu limit utilization
			"C": {
				QueryName:  "C",
				DataSource: v3.DataSourceMetrics,
				AggregateAttribute: v3.AttributeKey{
					Key:      metricNamesForWorkloads["cpu_limit"],
					DataType: v3.AttributeKeyDataTypeFloat64,
				},
				Temporality: v3.Unspecified,
				Filters: &v3.FilterSet{
					Operator: "AND",
					Items:    []v3.FilterItem{},
				},
				GroupBy:          []v3.AttributeKey{},
				Expression:       "C",
				ReduceTo:         v3.ReduceToOperatorAvg,
				TimeAggregation:  v3.TimeAggregationAvg,
				SpaceAggregation: v3.SpaceAggregationSum,
				Disabled:         false,
			},
			// pod memory utilization
			"D": {
				QueryName:  "D",
				DataSource: v3.DataSourceMetrics,
				AggregateAttribute: v3.AttributeKey{
					Key:      metricNamesForWorkloads["memory"],
					DataType: v3.AttributeKeyDataTypeFloat64,
				},
				Temporality: v3.Unspecified,
				Filters: &v3.FilterSet{
					Operator: "AND",
					Items:    []v3.FilterItem{},
				},
				GroupBy:          []v3.AttributeKey{},
				Expression:       "D",
				ReduceTo:         v3.ReduceToOperatorAvg,
				TimeAggregation:  v3.TimeAggregationAvg,
				SpaceAggregation: v3.SpaceAggregationSum,
				Disabled:         false,
			},
			// pod memory request utilization
			"E": {
				QueryName:  "E",
				DataSource: v3.DataSourceMetrics,
				AggregateAttribute: v3.AttributeKey{
					Key:      metricNamesForWorkloads["memory_request"],
					DataType: v3.AttributeKeyDataTypeFloat64,
				},
				Temporality: v3.Unspecified,
				Filters: &v3.FilterSet{
					Operator: "AND",
					Items:    []v3.FilterItem{},
				},
				GroupBy:          []v3.AttributeKey{},
				Expression:       "E",
				ReduceTo:         v3.ReduceToOperatorAvg,
				TimeAggregation:  v3.TimeAggregationAvg,
				SpaceAggregation: v3.SpaceAggregationSum,
				Disabled:         false,
			},
			// pod memory limit utilization
			"F": {
				QueryName:  "F",
				DataSource: v3.DataSourceMetrics,
				AggregateAttribute: v3.AttributeKey{
					Key:      metricNamesForWorkloads["memory_limit"],
					DataType: v3.AttributeKeyDataTypeFloat64,
				},
				Temporality: v3.Unspecified,
				Filters: &v3.FilterSet{
					Operator: "AND",
					Items:    []v3.FilterItem{},
				},
				GroupBy:          []v3.AttributeKey{},
				Expression:       "F",
				ReduceTo:         v3.ReduceToOperatorAvg,
				TimeAggregation:  v3.TimeAggregationAvg,
				SpaceAggregation: v3.SpaceAggregationSum,
				Disabled:         false,
			},
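			// container restarts; runningDiff turns the monotonically
			// increasing restart count into per-interval deltas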
"G": {
|
|
||||||
QueryName: "G",
|
|
||||||
DataSource: v3.DataSourceMetrics,
|
|
||||||
AggregateAttribute: v3.AttributeKey{
|
|
||||||
Key: metricNamesForWorkloads["restarts"],
|
|
||||||
DataType: v3.AttributeKeyDataTypeFloat64,
|
|
||||||
},
|
|
||||||
Temporality: v3.Unspecified,
|
|
||||||
Filters: &v3.FilterSet{
|
|
||||||
Operator: "AND",
|
|
||||||
Items: []v3.FilterItem{},
|
|
||||||
},
|
|
||||||
GroupBy: []v3.AttributeKey{},
|
|
||||||
Expression: "G",
|
|
||||||
ReduceTo: v3.ReduceToOperatorSum,
|
|
||||||
TimeAggregation: v3.TimeAggregationAnyLast,
|
|
||||||
SpaceAggregation: v3.SpaceAggregationMax,
|
|
||||||
Functions: []v3.Function{{Name: v3.FunctionNameRunningDiff}},
|
|
||||||
Disabled: false,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
PanelType: v3.PanelTypeTable,
|
|
||||||
QueryType: v3.QueryTypeBuilder,
|
|
||||||
},
|
|
||||||
Version: "v4",
|
|
||||||
FormatForWeb: true,
|
|
||||||
}
|
|
||||||
@@ -142,7 +142,7 @@ func enrichFieldWithMetadata(field v3.AttributeKey, fields map[string]v3.Attribu
 	}

 	// check if the field is present in the fields map
-	for _, key := range utils.GenerateEnrichmentKeys(field) {
+	for _, key := range utils.GenerateLogEnrichmentKeys(field) {
 		if val, ok := fields[key]; ok {
 			return val
 		}
@@ -436,6 +436,8 @@ func buildLogsQuery(panelType v3.PanelType, start, end, step int64, mq *v3.Build
 	} else if panelType == v3.PanelTypeTable {
 		queryTmplPrefix =
 			"SELECT"
+		// step or aggregate interval is whole time period in case of table panel
+		step = (utils.GetEpochNanoSecs(end) - utils.GetEpochNanoSecs(start)) / NANOSECOND
 	} else if panelType == v3.PanelTypeGraph || panelType == v3.PanelTypeValue {
 		// Select the aggregate value for interval
 		queryTmplPrefix =
@@ -2,7 +2,6 @@ package cumulative

 import (
 	"fmt"
-	"os"

 	"go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers"
 	"go.signoz.io/signoz/pkg/query-service/constants"
@@ -41,9 +40,6 @@ import (
 const (
 	rateWithoutNegative     = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window)))`
 	increaseWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window)))`
-
-	experimentalRateWithoutNegative     = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
-	experimentalIncreaseWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window))`
 )

 // prepareTimeAggregationSubQueryTimeSeries prepares the sub-query to be used for temporal aggregation
@@ -155,22 +151,14 @@ func prepareTimeAggregationSubQuery(start, end, step int64, mq *v3.BuilderQuery)
 		subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
 	case v3.TimeAggregationRate:
 		innerSubQuery := fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
-		rateExp := rateWithoutNegative
-		if _, ok := os.LookupEnv("EXPERIMENTAL_RATE_WITHOUT_NEGATIVE"); ok {
-			rateExp = fmt.Sprintf(experimentalRateWithoutNegative, start)
-		}
 		rateQueryTmpl :=
-			"SELECT %s ts, " + rateExp +
+			"SELECT %s ts, " + rateWithoutNegative +
 				" as per_series_value FROM (%s) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)"
 		subQuery = fmt.Sprintf(rateQueryTmpl, selectLabels, innerSubQuery)
 	case v3.TimeAggregationIncrease:
 		innerSubQuery := fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
-		increaseExp := increaseWithoutNegative
-		if _, ok := os.LookupEnv("EXPERIMENTAL_INCREASE_WITHOUT_NEGATIVE"); ok {
-			increaseExp = experimentalIncreaseWithoutNegative
-		}
 		rateQueryTmpl :=
-			"SELECT %s ts, " + increaseExp +
+			"SELECT %s ts, " + increaseWithoutNegative +
 				" as per_series_value FROM (%s) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)"
 		subQuery = fmt.Sprintf(rateQueryTmpl, selectLabels, innerSubQuery)
 	}
@@ -12,7 +12,6 @@ import (
 	"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
 	tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
 	"go.signoz.io/signoz/pkg/query-service/common"
-	"go.signoz.io/signoz/pkg/query-service/constants"
 	chErrors "go.signoz.io/signoz/pkg/query-service/errors"
 	"go.signoz.io/signoz/pkg/query-service/querycache"
 	"go.signoz.io/signoz/pkg/query-service/utils"
@@ -53,8 +52,7 @@ type querier struct {
 	returnedSeries []*v3.Series
 	returnedErr    error

 	UseLogsNewSchema  bool
-	UseTraceNewSchema bool
 }

 type QuerierOptions struct {
@@ -310,121 +308,56 @@ func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRang
 	return results, errQueriesByName, err
 }

-func (q *querier) runWindowBasedListQuery(ctx context.Context, params *v3.QueryRangeParamsV3, tsRanges []utils.LogsListTsRange) ([]*v3.Result, map[string]error, error) {
+func (q *querier) runLogsListQuery(ctx context.Context, params *v3.QueryRangeParamsV3, tsRanges []utils.LogsListTsRange) ([]*v3.Result, map[string]error, error) {
 	res := make([]*v3.Result, 0)
 	qName := ""
 	pageSize := uint64(0)
-	limit := uint64(0)
-	offset := uint64(0)

 	// since we are considering only one query
 	for name, v := range params.CompositeQuery.BuilderQueries {
 		qName = name
 		pageSize = v.PageSize
-
-		// for traces specifically
-		limit = v.Limit
-		offset = v.Offset
 	}
 	data := []*v3.Row{}

-	tracesLimit := limit + offset
-
 	for _, v := range tsRanges {
 		params.Start = v.Start
 		params.End = v.End

-		length := uint64(0)
-		// this will run only once
-		// appending the filter to get the next set of data
-		if params.CompositeQuery.BuilderQueries[qName].DataSource == v3.DataSourceLogs {
-			params.CompositeQuery.BuilderQueries[qName].PageSize = pageSize - uint64(len(data))
-			queries, err := q.builder.PrepareQueries(params)
-			if err != nil {
-				return nil, nil, err
-			}
-			for name, query := range queries {
-				rowList, err := q.reader.GetListResultV3(ctx, query)
-				if err != nil {
-					errs := []error{err}
-					errQueriesByName := map[string]error{
-						name: err,
-					}
-					return nil, errQueriesByName, fmt.Errorf("encountered multiple errors: %s", multierr.Combine(errs...))
-				}
-				length += uint64(len(rowList))
-				data = append(data, rowList...)
-			}
-
-			if length > 0 {
-				params.CompositeQuery.BuilderQueries[qName].Filters.Items = append(params.CompositeQuery.BuilderQueries[qName].Filters.Items, v3.FilterItem{
-					Key: v3.AttributeKey{
-						Key:      "id",
-						IsColumn: true,
-						DataType: "string",
-					},
-					Operator: v3.FilterOperatorLessThan,
-					Value:    data[len(data)-1].Data["id"],
-				})
-			}
-
-			if uint64(len(data)) >= pageSize {
-				break
-			}
-		} else {
-			// TRACE
-			// we are updating the offset and limit based on the number of traces we have found in the current timerange
-			// e.g.
-			// 1) offset = 0, limit = 100, tsRanges = [t1, t10], [t10, t20], [t20, t30]
-			//
-			// if 100 traces are there in [t1, t10] then 100 will return immediately.
-			// if 10 traces are there in [t1, t10] then we get 10, set offset to 0 and limit to 90, search in the next timerange of [t10, t20]
-			// if we don't find any trace in [t1, t10], then we search in [t10, t20] with offset=0, limit=100
-			//
-			// 2) offset = 50, limit = 100, tsRanges = [t1, t10], [t10, t20], [t20, t30]
-			//
-			// If we find 150 traces with limit=150 and offset=0 in [t1, t10] then we return immediately 100 traces
-			// If we find 50 in [t1, t10] with limit=150 and offset=0 then it will set limit = 100 and offset=0 and search in the next timerange of [t10, t20]
-			// if we don't find any trace in [t1, t10], then we search in [t10, t20] with limit=150 and offset=0

-			// max limit + offset is 10k for pagination
-			if tracesLimit > constants.TRACE_V4_MAX_PAGINATION_LIMIT {
-				return nil, nil, fmt.Errorf("maximum traces that can be paginated is 10000")
-			}
-
-			params.CompositeQuery.BuilderQueries[qName].Offset = 0
-			params.CompositeQuery.BuilderQueries[qName].Limit = tracesLimit
-			queries, err := q.builder.PrepareQueries(params)
-			if err != nil {
-				return nil, nil, err
-			}
-			for name, query := range queries {
-				rowList, err := q.reader.GetListResultV3(ctx, query)
-				if err != nil {
-					errs := []error{err}
-					errQueriesByName := map[string]error{
-						name: err,
-					}
-					return nil, errQueriesByName, fmt.Errorf("encountered multiple errors: %s", multierr.Combine(errs...))
-				}
-				length += uint64(len(rowList))
-
-				// skip the traces unless offset is 0
-				for _, row := range rowList {
-					if offset == 0 {
-						data = append(data, row)
-					} else {
-						offset--
-					}
-				}
-			}
-			tracesLimit = tracesLimit - length
-
-			if uint64(len(data)) >= limit {
-				break
-			}
-		}
+		params.CompositeQuery.BuilderQueries[qName].PageSize = pageSize - uint64(len(data))
+		queries, err := q.builder.PrepareQueries(params)
+		if err != nil {
+			return nil, nil, err
+		}
+
+		// this will run only once
+		for name, query := range queries {
+			rowList, err := q.reader.GetListResultV3(ctx, query)
+			if err != nil {
+				errs := []error{err}
+				errQuriesByName := map[string]error{
+					name: err,
+				}
+				return nil, errQuriesByName, fmt.Errorf("encountered multiple errors: %s", multierr.Combine(errs...))
+			}
+			data = append(data, rowList...)
+		}
+
+		// append a filter to the params
+		if len(data) > 0 {
+			params.CompositeQuery.BuilderQueries[qName].Filters.Items = append(params.CompositeQuery.BuilderQueries[qName].Filters.Items, v3.FilterItem{
+				Key: v3.AttributeKey{
+					Key:      "id",
+					IsColumn: true,
+					DataType: "string",
+				},
+				Operator: v3.FilterOperatorLessThan,
+				Value:    data[len(data)-1].Data["id"],
+			})
+		}
+
+		if uint64(len(data)) >= pageSize {
+			break
+		}
 	}
 	res = append(res, &v3.Result{
@@ -435,25 +368,15 @@ func (q *querier) runWindowBasedListQuery(ctx context.Context, params *v3.QueryR
 }

 func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, map[string]error, error) {
-	// List query has support for only one query
-	// we are skipping for PanelTypeTrace as it has a custom order by regardless of what's in the payload
-	if params.CompositeQuery != nil &&
-		len(params.CompositeQuery.BuilderQueries) == 1 &&
-		params.CompositeQuery.PanelType != v3.PanelTypeTrace {
+	// List query has support for only one query.
+	if q.UseLogsNewSchema && params.CompositeQuery != nil && len(params.CompositeQuery.BuilderQueries) == 1 {
 		for _, v := range params.CompositeQuery.BuilderQueries {
-			if (v.DataSource == v3.DataSourceLogs && !q.UseLogsNewSchema) ||
-				(v.DataSource == v3.DataSourceTraces && !q.UseTraceNewSchema) {
-				break
-			}
-
 			// only allow logs queries with timestamp ordering desc
-			// TODO(nitya): allow for timestamp asc
-			if (v.DataSource == v3.DataSourceLogs || v.DataSource == v3.DataSourceTraces) &&
-				len(v.OrderBy) == 1 &&
-				v.OrderBy[0].ColumnName == "timestamp" &&
-				v.OrderBy[0].Order == "desc" {
-				startEndArr := utils.GetListTsRanges(params.Start, params.End)
-				return q.runWindowBasedListQuery(ctx, params, startEndArr)
+			if v.DataSource == v3.DataSourceLogs && len(v.OrderBy) == 1 && v.OrderBy[0].ColumnName == "timestamp" && v.OrderBy[0].Order == "desc" {
+				startEndArr := utils.GetLogsListTsRanges(params.Start, params.End)
+				if len(startEndArr) > 0 {
+					return q.runLogsListQuery(ctx, params, startEndArr)
+				}
 			}
 		}
 	}
@@ -485,13 +408,13 @@ func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRan
 	close(ch)

 	var errs []error
-	errQueriesByName := make(map[string]error)
+	errQuriesByName := make(map[string]error)
 	res := make([]*v3.Result, 0)
 	// read values from the channel
 	for r := range ch {
 		if r.Err != nil {
 			errs = append(errs, r.Err)
-			errQueriesByName[r.Name] = r.Err
+			errQuriesByName[r.Name] = r.Err
 			continue
 		}
 		res = append(res, &v3.Result{
@@ -500,7 +423,7 @@ func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRan
 		})
 	}
 	if len(errs) != 0 {
-		return nil, errQueriesByName, fmt.Errorf("encountered multiple errors: %s", multierr.Combine(errs...))
+		return nil, errQuriesByName, fmt.Errorf("encountered multiple errors: %s", multierr.Combine(errs...))
 	}
 	return res, nil, nil
 }
|||||||
@@ -5,21 +5,15 @@ import (
|
|||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
"math"
|
"math"
|
||||||
"regexp"
|
|
||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
cmock "github.com/srikanthccv/ClickHouse-go-mock"
|
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
"go.signoz.io/signoz/pkg/query-service/app/clickhouseReader"
|
|
||||||
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
|
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
|
||||||
tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
|
tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
|
||||||
"go.signoz.io/signoz/pkg/query-service/cache/inmemory"
|
"go.signoz.io/signoz/pkg/query-service/cache/inmemory"
|
||||||
"go.signoz.io/signoz/pkg/query-service/featureManager"
|
|
||||||
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
|
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
|
||||||
"go.signoz.io/signoz/pkg/query-service/querycache"
|
"go.signoz.io/signoz/pkg/query-service/querycache"
|
||||||
"go.signoz.io/signoz/pkg/query-service/utils"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func minTimestamp(series []*v3.Series) int64 {
|
func minTimestamp(series []*v3.Series) int64 {
|
||||||
@@ -1130,304 +1124,3 @@ func TestQueryRangeValueTypePromQL(t *testing.T) {
 		}
 	}
 }
-
-type regexMatcher struct {
-}
-
-func (m *regexMatcher) Match(expectedSQL, actualSQL string) error {
-	re, err := regexp.Compile(expectedSQL)
-	if err != nil {
-		return err
-	}
-	if !re.MatchString(actualSQL) {
-		return fmt.Errorf("expected query to contain %s, got %s", expectedSQL, actualSQL)
-	}
-	return nil
-}
-
-func Test_querier_runWindowBasedListQuery(t *testing.T) {
-	params := &v3.QueryRangeParamsV3{
-		Start: 1722171576000000000, // July 28, 2024 6:29:36 PM
-		End:   1722262800000000000, // July 29, 2024 7:50:00 PM
-		CompositeQuery: &v3.CompositeQuery{
-			PanelType: v3.PanelTypeList,
-			BuilderQueries: map[string]*v3.BuilderQuery{
-				"A": {
-					QueryName:         "A",
-					Expression:        "A",
-					DataSource:        v3.DataSourceTraces,
-					PageSize:          10,
-					Limit:             100,
-					StepInterval:      60,
-					AggregateOperator: v3.AggregateOperatorNoOp,
-					SelectColumns:     []v3.AttributeKey{{Key: "serviceName"}},
-					Filters: &v3.FilterSet{
-						Operator: "AND",
-						Items:    []v3.FilterItem{},
-					},
-				},
-			},
-		},
-	}
-
-	tsRanges := []utils.LogsListTsRange{
-		{
-			Start: 1722259200000000000, // July 29, 2024 6:50:00 PM
-			End:   1722262800000000000, // July 29, 2024 7:50:00 PM
-		},
-		{
-			Start: 1722252000000000000, // July 29, 2024 4:50:00 PM
-			End:   1722259200000000000, // July 29, 2024 6:50:00 PM
-		},
-		{
-			Start: 1722237600000000000, // July 29, 2024 12:50:00 PM
-			End:   1722252000000000000, // July 29, 2024 4:50:00 PM
-		},
-		{
-			Start: 1722208800000000000, // July 29, 2024 4:50:00 AM
-			End:   1722237600000000000, // July 29, 2024 12:50:00 PM
-		},
-		{
-			Start: 1722171576000000000, // July 28, 2024 6:29:36 PM
-			End:   1722208800000000000, // July 29, 2024 4:50:00 AM
-		},
-	}
-
-	type queryParams struct {
-		start  int64
-		end    int64
-		limit  uint64
-		offset uint64
-	}
-
-	type queryResponse struct {
-		expectedQuery string
-		timestamps    []uint64
-	}
-
-	// create test struct with moc data i.e array of timestamps, limit, offset and expected results
-	testCases := []struct {
-		name               string
-		queryResponses     []queryResponse
-		queryParams        queryParams
-		expectedTimestamps []int64
-		expectedError      bool
-	}{
-		{
-			name: "should return correct timestamps when querying within time window",
-			queryResponses: []queryResponse{
-				{
-					expectedQuery: ".*(timestamp >= '1722259200000000000' AND timestamp <= '1722262800000000000').* DESC LIMIT 2",
-					timestamps:    []uint64{1722259300000000000, 1722259400000000000},
-				},
-			},
-			queryParams: queryParams{
-				start:  1722171576000000000,
-				end:    1722262800000000000,
-				limit:  2,
-				offset: 0,
-			},
-			expectedTimestamps: []int64{1722259300000000000, 1722259400000000000},
-		},
-		{
-			name: "all data not in first windows",
-			queryResponses: []queryResponse{
-				{
-					expectedQuery: ".*(timestamp >= '1722259200000000000' AND timestamp <= '1722262800000000000').* DESC LIMIT 3",
-					timestamps:    []uint64{1722259300000000000, 1722259400000000000},
-				},
-				{
-					expectedQuery: ".*(timestamp >= '1722252000000000000' AND timestamp <= '1722259200000000000').* DESC LIMIT 1",
-					timestamps:    []uint64{1722253000000000000},
-				},
-			},
-			queryParams: queryParams{
-				start:  1722171576000000000,
-				end:    1722262800000000000,
-				limit:  3,
-				offset: 0,
-			},
-			expectedTimestamps: []int64{1722259300000000000, 1722259400000000000, 1722253000000000000},
-		},
-		{
-			name: "data in multiple windows",
-			queryResponses: []queryResponse{
-				{
-					expectedQuery: ".*(timestamp >= '1722259200000000000' AND timestamp <= '1722262800000000000').* DESC LIMIT 5",
-					timestamps:    []uint64{1722259300000000000, 1722259400000000000},
-				},
-				{
-					expectedQuery: ".*(timestamp >= '1722252000000000000' AND timestamp <= '1722259200000000000').* DESC LIMIT 3",
-					timestamps:    []uint64{1722253000000000000},
-				},
-				{
-					expectedQuery: ".*(timestamp >= '1722237600000000000' AND timestamp <= '1722252000000000000').* DESC LIMIT 2",
-					timestamps:    []uint64{1722237700000000000},
-				},
-				{
-					expectedQuery: ".*(timestamp >= '1722208800000000000' AND timestamp <= '1722237600000000000').* DESC LIMIT 1",
-					timestamps:    []uint64{},
-				},
-				{
-					expectedQuery: ".*(timestamp >= '1722171576000000000' AND timestamp <= '1722208800000000000').* DESC LIMIT 1",
-					timestamps:    []uint64{},
-				},
-			},
-			queryParams: queryParams{
-				start:  1722171576000000000,
-				end:    1722262800000000000,
-				limit:  5,
-				offset: 0,
-			},
-			expectedTimestamps: []int64{1722259300000000000, 1722259400000000000, 1722253000000000000, 1722237700000000000},
-		},
-		{
-			name: "query with offset",
-			queryResponses: []queryResponse{
-				{
-					expectedQuery: ".*(timestamp >= '1722259200000000000' AND timestamp <= '1722262800000000000').* DESC LIMIT 7",
-					timestamps:    []uint64{1722259210000000000, 1722259220000000000, 1722259230000000000},
-				},
-				{
-					expectedQuery: ".*(timestamp >= '1722252000000000000' AND timestamp <= '1722259200000000000').* DESC LIMIT 4",
-					timestamps:    []uint64{1722253000000000000, 1722254000000000000, 1722255000000000000},
-				},
-				{
-					expectedQuery: ".*(timestamp >= '1722237600000000000' AND timestamp <= '1722252000000000000').* DESC LIMIT 1",
-					timestamps:    []uint64{1722237700000000000},
-				},
-			},
-			queryParams: queryParams{
-				start:  1722171576000000000,
-				end:    1722262800000000000,
-				limit:  4,
-				offset: 3,
-			},
-			expectedTimestamps: []int64{1722253000000000000, 1722254000000000000, 1722255000000000000, 1722237700000000000},
-		},
-		{
-			name: "query with offset and limit- data spread across multiple windows",
-			queryResponses: []queryResponse{
-				{
-					expectedQuery: ".*(timestamp >= '1722259200000000000' AND timestamp <= '1722262800000000000').* DESC LIMIT 11",
-					timestamps:    []uint64{},
-				},
-				{
-					expectedQuery: ".*(timestamp >= '1722252000000000000' AND timestamp <= '1722259200000000000').* DESC LIMIT 11",
-					timestamps:    []uint64{1722253000000000000, 1722254000000000000, 1722255000000000000},
-				},
-				{
-					expectedQuery: ".*(timestamp >= '1722237600000000000' AND timestamp <= '1722252000000000000').* DESC LIMIT 8",
-					timestamps:    []uint64{1722237700000000000, 1722237800000000000, 1722237900000000000, 1722237910000000000, 1722237920000000000},
-				},
-				{
-					expectedQuery: ".*(timestamp >= '1722208800000000000' AND timestamp <= '1722237600000000000').* DESC LIMIT 3",
-					timestamps:    []uint64{1722208810000000000, 1722208820000000000, 1722208830000000000},
-				},
-			},
-			queryParams: queryParams{
-				start:  1722171576000000000,
-				end:    1722262800000000000,
-				limit:  5,
-				offset: 6,
-			},
-			expectedTimestamps: []int64{1722237910000000000, 1722237920000000000, 1722208810000000000, 1722208820000000000, 1722208830000000000},
-		},
-		{
-			name:           "don't allow pagination to get more than 10k spans",
-			queryResponses: []queryResponse{},
-			queryParams: queryParams{
-				start:  1722171576000000000,
-				end:    1722262800000000000,
-				limit:  10,
-				offset: 9991,
-			},
-			expectedError: true,
-		},
-	}
-
-	cols := []cmock.ColumnType{
-		{Name: "timestamp", Type: "UInt64"},
-		{Name: "name", Type: "String"},
-	}
-	testName := "name"
-
-	options := clickhouseReader.NewOptions("", 0, 0, 0, "", "archiveNamespace")
-
-	// iterate over test data, create reader and run test
-	for _, tc := range testCases {
-		t.Run(tc.name, func(t *testing.T) {
-			// Setup mock
-			mock, err := cmock.NewClickHouseWithQueryMatcher(nil, &regexMatcher{})
-			require.NoError(t, err, "Failed to create ClickHouse mock")
-
-			// Configure mock responses
-			for _, response := range tc.queryResponses {
-				values := make([][]any, 0, len(response.timestamps))
-				for _, ts := range response.timestamps {
-					values = append(values, []any{&ts, &testName})
-				}
-				// if len(values) > 0 {
-				mock.ExpectQuery(response.expectedQuery).WillReturnRows(
-					cmock.NewRows(cols, values),
-				)
-				// }
-			}
-
-			// Create reader and querier
-			reader := clickhouseReader.NewReaderFromClickhouseConnection(
-				mock,
-				options,
-				nil,
-				"",
-				featureManager.StartManager(),
-				"",
-				true,
-			)
-
-			q := &querier{
-				reader: reader,
-				builder: queryBuilder.NewQueryBuilder(
-					queryBuilder.QueryBuilderOptions{
-						BuildTraceQuery: tracesV3.PrepareTracesQuery,
-					},
-					featureManager.StartManager(),
-				),
-			}
-			// Update query parameters
-			params.Start = tc.queryParams.start
-			params.End = tc.queryParams.end
-			params.CompositeQuery.BuilderQueries["A"].Limit = tc.queryParams.limit
-			params.CompositeQuery.BuilderQueries["A"].Offset = tc.queryParams.offset
-
-			// Execute query
-			results, errMap, err := q.runWindowBasedListQuery(context.Background(), params, tsRanges)
-
-			if tc.expectedError {
-				require.Error(t, err)
-				return
-			}
-
-			// Assertions
-			require.NoError(t, err, "Query execution failed")
-			require.Nil(t, errMap, "Unexpected error map in results")
-			require.Len(t, results, 1, "Expected exactly one result set")
-
-			result := results[0]
-			require.Equal(t, "A", result.QueryName, "Incorrect query name in results")
-			require.Len(t, result.List, len(tc.expectedTimestamps),
-				"Result count mismatch: got %d results, expected %d",
-				len(result.List), len(tc.expectedTimestamps))
-
-			for i, expected := range tc.expectedTimestamps {
-				require.Equal(t, expected, result.List[i].Timestamp.UnixNano(),
-					"Timestamp mismatch at index %d: got %d, expected %d",
-					i, result.List[i].Timestamp.UnixNano(), expected)
-			}
-
-			// Verify mock expectations
-			err = mock.ExpectationsWereMet()
-			require.NoError(t, err, "Mock expectations were not met")
-		})
-	}
-}
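A way to sanity-check the expected LIMIT values in the deleted test: each window is queried with whatever is left of limit+offset after earlier windows returned rows. A small sketch of that bookkeeping (my reading of the fixtures above, not code from the repo):

package main

import "fmt"

// nextWindowLimit reproduces the LIMIT sequence seen in the test
// fixtures: the first window asks for limit+offset rows, and each
// following window asks for that budget minus everything fetched so far.
func nextWindowLimit(limit, offset, fetchedSoFar uint64) uint64 {
	return limit + offset - fetchedSoFar
}

func main() {
	// "query with offset": limit=4, offset=3.
	fmt.Println(nextWindowLimit(4, 3, 0)) // 7  (first window)
	fmt.Println(nextWindowLimit(4, 3, 3)) // 4  (after 3 rows came back)
	fmt.Println(nextWindowLimit(4, 3, 6)) // 1  (after 3 more rows)
}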
@@ -12,7 +12,6 @@ import (
 	"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
 	tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
 	"go.signoz.io/signoz/pkg/query-service/common"
-	"go.signoz.io/signoz/pkg/query-service/constants"
 	chErrors "go.signoz.io/signoz/pkg/query-service/errors"
 	"go.signoz.io/signoz/pkg/query-service/querycache"
 	"go.signoz.io/signoz/pkg/query-service/utils"
@@ -49,11 +48,10 @@ type querier struct {
 	testingMode     bool
 	queriesExecuted []string
 	// tuple of start and end time in milliseconds
 	timeRanges     [][]int
 	returnedSeries []*v3.Series
 	returnedErr    error
 	UseLogsNewSchema  bool
-	UseTraceNewSchema bool
 }

 type QuerierOptions struct {
@@ -310,121 +308,56 @@ func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRang
 	return results, errQueriesByName, err
 }

-func (q *querier) runWindowBasedListQuery(ctx context.Context, params *v3.QueryRangeParamsV3, tsRanges []utils.LogsListTsRange) ([]*v3.Result, map[string]error, error) {
+func (q *querier) runLogsListQuery(ctx context.Context, params *v3.QueryRangeParamsV3, tsRanges []utils.LogsListTsRange) ([]*v3.Result, map[string]error, error) {
 	res := make([]*v3.Result, 0)
 	qName := ""
 	pageSize := uint64(0)
-	limit := uint64(0)
-	offset := uint64(0)

 	// se we are considering only one query
 	for name, v := range params.CompositeQuery.BuilderQueries {
 		qName = name
 		pageSize = v.PageSize
-
-		// for traces specifically
-		limit = v.Limit
-		offset = v.Offset
 	}
 	data := []*v3.Row{}

-	tracesLimit := limit + offset
-
 	for _, v := range tsRanges {
 		params.Start = v.Start
 		params.End = v.End

-		length := uint64(0)
-		// appending the filter to get the next set of data
-		if params.CompositeQuery.BuilderQueries[qName].DataSource == v3.DataSourceLogs {
-			params.CompositeQuery.BuilderQueries[qName].PageSize = pageSize - uint64(len(data))
-			queries, err := q.builder.PrepareQueries(params)
-			if err != nil {
-				return nil, nil, err
-			}
-
-			// this will to run only once
-			for name, query := range queries {
-				rowList, err := q.reader.GetListResultV3(ctx, query)
-				if err != nil {
-					errs := []error{err}
-					errQueriesByName := map[string]error{
-						name: err,
-					}
-					return nil, errQueriesByName, fmt.Errorf("encountered multiple errors: %s", multierr.Combine(errs...))
-				}
-				length += uint64(len(rowList))
-				data = append(data, rowList...)
-			}
-
-			if length > 0 {
-				params.CompositeQuery.BuilderQueries[qName].Filters.Items = append(params.CompositeQuery.BuilderQueries[qName].Filters.Items, v3.FilterItem{
-					Key: v3.AttributeKey{
-						Key:      "id",
-						IsColumn: true,
-						DataType: "string",
-					},
-					Operator: v3.FilterOperatorLessThan,
-					Value:    data[len(data)-1].Data["id"],
-				})
-			}
-
-			if uint64(len(data)) >= pageSize {
-				break
-			}
-		} else {
-			// TRACE
-			// we are updating the offset and limit based on the number of traces we have found in the current timerange
-			// eg -
-			// 1)offset = 0, limit = 100, tsRanges = [t1, t10], [t10, 20], [t20, t30]
-			//
-			// if 100 traces are there in [t1, t10] then 100 will return immediately.
-			// if 10 traces are there in [t1, t10] then we get 10, set offset to 0 and limit to 90, search in the next timerange of [t10, 20]
-			// if we don't find any trace in [t1, t10], then we search in [t10, 20] with offset=0, limit=100
-			//
-			// 2) offset = 50, limit = 100, tsRanges = [t1, t10], [t10, 20], [t20, t30]
-			//
-			// If we find 150 traces with limit=150 and offset=0 in [t1, t10] then we return immediately 100 traces
-			// If we find 50 in [t1, t10] with limit=150 and offset=0 then it will set limit = 100 and offset=0 and search in the next timerange of [t10, 20]
-			// if we don't find any trace in [t1, t10], then we search in [t10, 20] with limit=150 and offset=0
-
-			// max limit + offset is 10k for pagination
-			if tracesLimit > constants.TRACE_V4_MAX_PAGINATION_LIMIT {
-				return nil, nil, fmt.Errorf("maximum traces that can be paginated is 10000")
-			}
-
-			params.CompositeQuery.BuilderQueries[qName].Offset = 0
-			params.CompositeQuery.BuilderQueries[qName].Limit = tracesLimit
-			queries, err := q.builder.PrepareQueries(params)
-			if err != nil {
-				return nil, nil, err
-			}
-			for name, query := range queries {
-				rowList, err := q.reader.GetListResultV3(ctx, query)
-				if err != nil {
-					errs := []error{err}
-					errQueriesByName := map[string]error{
-						name: err,
-					}
-					return nil, errQueriesByName, fmt.Errorf("encountered multiple errors: %s", multierr.Combine(errs...))
-				}
-				length += uint64(len(rowList))
-
-				// skip the traces unless offset is 0
-				for _, row := range rowList {
-					if offset == 0 {
-						data = append(data, row)
-					} else {
-						offset--
-					}
-				}
-			}
-			tracesLimit = tracesLimit - length
-
-			if uint64(len(data)) >= limit {
-				break
-			}
+		params.CompositeQuery.BuilderQueries[qName].PageSize = pageSize - uint64(len(data))
+		queries, err := q.builder.PrepareQueries(params)
+		if err != nil {
+			return nil, nil, err
+		}
+
+		// this will to run only once
+		for name, query := range queries {
+			rowList, err := q.reader.GetListResultV3(ctx, query)
+			if err != nil {
+				errs := []error{err}
+				errQuriesByName := map[string]error{
+					name: err,
+				}
+				return nil, errQuriesByName, fmt.Errorf("encountered multiple errors: %s", multierr.Combine(errs...))
+			}
+			data = append(data, rowList...)
+		}
+
+		// append a filter to the params
+		if len(data) > 0 {
+			params.CompositeQuery.BuilderQueries[qName].Filters.Items = append(params.CompositeQuery.BuilderQueries[qName].Filters.Items, v3.FilterItem{
+				Key: v3.AttributeKey{
+					Key:      "id",
+					IsColumn: true,
+					DataType: "string",
+				},
+				Operator: v3.FilterOperatorLessThan,
+				Value:    data[len(data)-1].Data["id"],
+			})
+		}
+
+		if uint64(len(data)) >= pageSize {
+			break
 		}
 	}
 	res = append(res, &v3.Result{
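The deleted trace branch implements offset-aware pagination across time windows: offset is folded into the per-window query budget, rows are skipped client-side until the caller's offset is consumed, and the remaining budget carries into the next window. A minimal standalone sketch of that skip-and-carry loop, with illustrative types rather than the repo's:

package main

import "fmt"

// paginateWindows sketches the deleted trace branch: walk time windows,
// fetch up to the remaining limit+offset budget per window, skip rows
// until the caller's offset is consumed, and stop once limit rows are
// collected. fetch stands in for the per-window ClickHouse query; the
// repo caps maxBudget at 10k for traces.
func paginateWindows(fetch func(w int, budget uint64) []string, windows int, limit, offset, maxBudget uint64) ([]string, error) {
	budget := limit + offset
	if budget > maxBudget {
		return nil, fmt.Errorf("maximum rows that can be paginated is %d", maxBudget)
	}
	out := []string{}
	for w := 0; w < windows; w++ {
		rows := fetch(w, budget)
		for _, r := range rows {
			if offset == 0 {
				out = append(out, r)
			} else {
				offset-- // burn the offset before collecting anything
			}
		}
		budget -= uint64(len(rows))
		if uint64(len(out)) >= limit {
			break
		}
	}
	return out, nil
}

func main() {
	data := [][]string{{"t1", "t2", "t3"}, {"t4", "t5"}, {"t6"}}
	fetch := func(w int, budget uint64) []string {
		rows := data[w]
		if uint64(len(rows)) > budget {
			rows = rows[:budget]
		}
		return rows
	}
	got, err := paginateWindows(fetch, len(data), 3, 2, 10000)
	fmt.Println(got, err) // [t3 t4 t5] <nil>
}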
@@ -436,24 +369,14 @@ func (q *querier) runWindowBasedListQuery(ctx context.Context, params *v3.QueryR

 func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, map[string]error, error) {
 	// List query has support for only one query.
-	// we are skipping for PanelTypeTrace as it has a custom order by regardless of what's in the payload
-	if params.CompositeQuery != nil &&
-		len(params.CompositeQuery.BuilderQueries) == 1 &&
-		params.CompositeQuery.PanelType != v3.PanelTypeTrace {
+	if q.UseLogsNewSchema && params.CompositeQuery != nil && len(params.CompositeQuery.BuilderQueries) == 1 {
 		for _, v := range params.CompositeQuery.BuilderQueries {
-			if (v.DataSource == v3.DataSourceLogs && !q.UseLogsNewSchema) ||
-				(v.DataSource == v3.DataSourceTraces && !q.UseTraceNewSchema) {
-				break
-			}
-
 			// only allow of logs queries with timestamp ordering desc
-			// TODO(nitya): allow for timestamp asc
-			if (v.DataSource == v3.DataSourceLogs || v.DataSource == v3.DataSourceTraces) &&
-				len(v.OrderBy) == 1 &&
-				v.OrderBy[0].ColumnName == "timestamp" &&
-				v.OrderBy[0].Order == "desc" {
-				startEndArr := utils.GetListTsRanges(params.Start, params.End)
-				return q.runWindowBasedListQuery(ctx, params, startEndArr)
+			if v.DataSource == v3.DataSourceLogs && len(v.OrderBy) == 1 && v.OrderBy[0].ColumnName == "timestamp" && v.OrderBy[0].Order == "desc" {
+				startEndArr := utils.GetLogsListTsRanges(params.Start, params.End)
+				if len(startEndArr) > 0 {
+					return q.runLogsListQuery(ctx, params, startEndArr)
+				}
 			}
 		}
 	}
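The window list the old code obtained from utils.GetListTsRanges walks backward from End in doubling slices; the test fixtures earlier in this diff show 1h, 2h, 4h, 8h and then the remainder down to Start, so recent data is probed first with small, cheap queries. A rough sketch of that shape, inferred from the fixtures rather than the actual utils implementation:

package main

import "fmt"

type tsRange struct{ Start, End int64 }

// listTsRanges mimics the window layout seen in the test fixtures:
// slices of 1h, 2h, 4h, ... counted back from end until start is
// reached. The real utils.GetListTsRanges may differ in detail.
func listTsRanges(start, end int64) []tsRange {
	const hour int64 = 3600 * 1000000000 // nanoseconds
	ranges := []tsRange{}
	window := hour
	for cur := end; cur > start; window *= 2 {
		lo := cur - window
		if lo < start {
			lo = start
		}
		ranges = append(ranges, tsRange{Start: lo, End: cur})
		cur = lo
	}
	return ranges
}

func main() {
	// Same bounds as the test fixtures above.
	for _, r := range listTsRanges(1722171576000000000, 1722262800000000000) {
		fmt.Println(r.Start, r.End)
	}
}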
@@ -493,13 +416,13 @@ func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRan
 	close(ch)

 	var errs []error
-	errQueriesByName := make(map[string]error)
+	errQuriesByName := make(map[string]error)
 	res := make([]*v3.Result, 0)
 	// read values from the channel
 	for r := range ch {
 		if r.Err != nil {
 			errs = append(errs, r.Err)
-			errQueriesByName[r.Name] = r.Err
+			errQuriesByName[r.Name] = r.Err
 			continue
 		}
 		res = append(res, &v3.Result{
@@ -508,7 +431,7 @@ func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRan
 		})
 	}
 	if len(errs) != 0 {
-		return nil, errQueriesByName, fmt.Errorf("encountered multiple errors: %s", multierr.Combine(errs...))
+		return nil, errQuriesByName, fmt.Errorf("encountered multiple errors: %s", multierr.Combine(errs...))
 	}
 	return res, nil, nil
 }
@@ -5,21 +5,15 @@ import (
 	"encoding/json"
 	"fmt"
 	"math"
-	"regexp"
 	"strings"
 	"testing"
 	"time"

-	cmock "github.com/srikanthccv/ClickHouse-go-mock"
-	"github.com/stretchr/testify/require"
-	"go.signoz.io/signoz/pkg/query-service/app/clickhouseReader"
 	"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
 	tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
 	"go.signoz.io/signoz/pkg/query-service/cache/inmemory"
-	"go.signoz.io/signoz/pkg/query-service/featureManager"
 	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
 	"go.signoz.io/signoz/pkg/query-service/querycache"
-	"go.signoz.io/signoz/pkg/query-service/utils"
 )

 func minTimestamp(series []*v3.Series) int64 {
@@ -804,8 +798,8 @@ func TestV2QueryRangeValueType(t *testing.T) {
 	}
 	q := NewQuerier(opts)
 	expectedTimeRangeInQueryString := []string{
 		fmt.Sprintf("unix_milli >= %d AND unix_milli < %d", 1675115520000, 1675115580000+120*60*1000), // 31st Jan, 03:23:00 to 31st Jan, 05:23:00
 		fmt.Sprintf("unix_milli >= %d AND unix_milli < %d", 1675115580000+120*60*1000, 1675115580000+180*60*1000), // 31st Jan, 05:23:00 to 31st Jan, 06:23:00
 		fmt.Sprintf("timestamp >= '%d' AND timestamp <= '%d'", (1675119196722)*int64(1000000), (1675126396722)*int64(1000000)), // 31st Jan, 05:23:00 to 31st Jan, 06:23:00
 	}

@@ -1184,304 +1178,3 @@ func TestV2QueryRangeValueTypePromQL(t *testing.T) {
 		}
 	}
 }
-
-type regexMatcher struct {
-}
-
-func (m *regexMatcher) Match(expectedSQL, actualSQL string) error {
-	re, err := regexp.Compile(expectedSQL)
-	if err != nil {
-		return err
-	}
-	if !re.MatchString(actualSQL) {
-		return fmt.Errorf("expected query to contain %s, got %s", expectedSQL, actualSQL)
-	}
-	return nil
-}
-
-func Test_querier_runWindowBasedListQuery(t *testing.T) {
-	params := &v3.QueryRangeParamsV3{
-		Start: 1722171576000000000, // July 28, 2024 6:29:36 PM
-		End:   1722262800000000000, // July 29, 2024 7:50:00 PM
-		CompositeQuery: &v3.CompositeQuery{
-			PanelType: v3.PanelTypeList,
-			BuilderQueries: map[string]*v3.BuilderQuery{
-				"A": {
-					QueryName:         "A",
-					Expression:        "A",
-					DataSource:        v3.DataSourceTraces,
-					PageSize:          10,
-					Limit:             100,
-					StepInterval:      60,
-					AggregateOperator: v3.AggregateOperatorNoOp,
-					SelectColumns:     []v3.AttributeKey{{Key: "serviceName"}},
-					Filters: &v3.FilterSet{
-						Operator: "AND",
-						Items:    []v3.FilterItem{},
-					},
-				},
-			},
-		},
-	}
-
-	tsRanges := []utils.LogsListTsRange{
-		{
-			Start: 1722259200000000000, // July 29, 2024 6:50:00 PM
-			End:   1722262800000000000, // July 29, 2024 7:50:00 PM
-		},
-		{
-			Start: 1722252000000000000, // July 29, 2024 4:50:00 PM
-			End:   1722259200000000000, // July 29, 2024 6:50:00 PM
-		},
-		{
-			Start: 1722237600000000000, // July 29, 2024 12:50:00 PM
-			End:   1722252000000000000, // July 29, 2024 4:50:00 PM
-		},
-		{
-			Start: 1722208800000000000, // July 29, 2024 4:50:00 AM
-			End:   1722237600000000000, // July 29, 2024 12:50:00 PM
-		},
-		{
-			Start: 1722171576000000000, // July 28, 2024 6:29:36 PM
-			End:   1722208800000000000, // July 29, 2024 4:50:00 AM
-		},
-	}
-
-	type queryParams struct {
-		start  int64
-		end    int64
-		limit  uint64
-		offset uint64
-	}
-
-	type queryResponse struct {
-		expectedQuery string
-		timestamps    []uint64
-	}
-
-	// create test struct with moc data i.e array of timestamps, limit, offset and expected results
-	testCases := []struct {
-		name               string
-		queryResponses     []queryResponse
-		queryParams        queryParams
-		expectedTimestamps []int64
-		expectedError      bool
-	}{
-		{
-			name: "should return correct timestamps when querying within time window",
-			queryResponses: []queryResponse{
-				{
-					expectedQuery: ".*(timestamp >= '1722259200000000000' AND timestamp <= '1722262800000000000').* DESC LIMIT 2",
-					timestamps:    []uint64{1722259300000000000, 1722259400000000000},
-				},
-			},
-			queryParams: queryParams{
-				start:  1722171576000000000,
-				end:    1722262800000000000,
-				limit:  2,
-				offset: 0,
-			},
-			expectedTimestamps: []int64{1722259300000000000, 1722259400000000000},
-		},
-		{
-			name: "all data not in first windows",
-			queryResponses: []queryResponse{
-				{
-					expectedQuery: ".*(timestamp >= '1722259200000000000' AND timestamp <= '1722262800000000000').* DESC LIMIT 3",
-					timestamps:    []uint64{1722259300000000000, 1722259400000000000},
-				},
-				{
-					expectedQuery: ".*(timestamp >= '1722252000000000000' AND timestamp <= '1722259200000000000').* DESC LIMIT 1",
-					timestamps:    []uint64{1722253000000000000},
-				},
-			},
-			queryParams: queryParams{
-				start:  1722171576000000000,
-				end:    1722262800000000000,
-				limit:  3,
-				offset: 0,
-			},
-			expectedTimestamps: []int64{1722259300000000000, 1722259400000000000, 1722253000000000000},
-		},
-		{
-			name: "data in multiple windows",
-			queryResponses: []queryResponse{
-				{
-					expectedQuery: ".*(timestamp >= '1722259200000000000' AND timestamp <= '1722262800000000000').* DESC LIMIT 5",
-					timestamps:    []uint64{1722259300000000000, 1722259400000000000},
-				},
-				{
-					expectedQuery: ".*(timestamp >= '1722252000000000000' AND timestamp <= '1722259200000000000').* DESC LIMIT 3",
-					timestamps:    []uint64{1722253000000000000},
-				},
-				{
-					expectedQuery: ".*(timestamp >= '1722237600000000000' AND timestamp <= '1722252000000000000').* DESC LIMIT 2",
-					timestamps:    []uint64{1722237700000000000},
-				},
-				{
-					expectedQuery: ".*(timestamp >= '1722208800000000000' AND timestamp <= '1722237600000000000').* DESC LIMIT 1",
-					timestamps:    []uint64{},
-				},
-				{
-					expectedQuery: ".*(timestamp >= '1722171576000000000' AND timestamp <= '1722208800000000000').* DESC LIMIT 1",
-					timestamps:    []uint64{},
-				},
-			},
-			queryParams: queryParams{
-				start:  1722171576000000000,
-				end:    1722262800000000000,
-				limit:  5,
-				offset: 0,
-			},
-			expectedTimestamps: []int64{1722259300000000000, 1722259400000000000, 1722253000000000000, 1722237700000000000},
-		},
-		{
-			name: "query with offset",
-			queryResponses: []queryResponse{
-				{
-					expectedQuery: ".*(timestamp >= '1722259200000000000' AND timestamp <= '1722262800000000000').* DESC LIMIT 7",
-					timestamps:    []uint64{1722259210000000000, 1722259220000000000, 1722259230000000000},
-				},
-				{
-					expectedQuery: ".*(timestamp >= '1722252000000000000' AND timestamp <= '1722259200000000000').* DESC LIMIT 4",
-					timestamps:    []uint64{1722253000000000000, 1722254000000000000, 1722255000000000000},
-				},
-				{
-					expectedQuery: ".*(timestamp >= '1722237600000000000' AND timestamp <= '1722252000000000000').* DESC LIMIT 1",
-					timestamps:    []uint64{1722237700000000000},
-				},
-			},
-			queryParams: queryParams{
-				start:  1722171576000000000,
-				end:    1722262800000000000,
-				limit:  4,
-				offset: 3,
-			},
-			expectedTimestamps: []int64{1722253000000000000, 1722254000000000000, 1722255000000000000, 1722237700000000000},
-		},
-		{
-			name: "query with offset and limit- data spread across multiple windows",
-			queryResponses: []queryResponse{
-				{
-					expectedQuery: ".*(timestamp >= '1722259200000000000' AND timestamp <= '1722262800000000000').* DESC LIMIT 11",
-					timestamps:    []uint64{},
-				},
-				{
-					expectedQuery: ".*(timestamp >= '1722252000000000000' AND timestamp <= '1722259200000000000').* DESC LIMIT 11",
-					timestamps:    []uint64{1722253000000000000, 1722254000000000000, 1722255000000000000},
-				},
-				{
-					expectedQuery: ".*(timestamp >= '1722237600000000000' AND timestamp <= '1722252000000000000').* DESC LIMIT 8",
-					timestamps:    []uint64{1722237700000000000, 1722237800000000000, 1722237900000000000, 1722237910000000000, 1722237920000000000},
-				},
-				{
-					expectedQuery: ".*(timestamp >= '1722208800000000000' AND timestamp <= '1722237600000000000').* DESC LIMIT 3",
-					timestamps:    []uint64{1722208810000000000, 1722208820000000000, 1722208830000000000},
-				},
-			},
-			queryParams: queryParams{
-				start:  1722171576000000000,
-				end:    1722262800000000000,
-				limit:  5,
-				offset: 6,
-			},
-			expectedTimestamps: []int64{1722237910000000000, 1722237920000000000, 1722208810000000000, 1722208820000000000, 1722208830000000000},
-		},
-		{
-			name:           "don't allow pagination to get more than 10k spans",
-			queryResponses: []queryResponse{},
-			queryParams: queryParams{
-				start:  1722171576000000000,
-				end:    1722262800000000000,
-				limit:  10,
-				offset: 9991,
-			},
-			expectedError: true,
-		},
-	}
-
-	cols := []cmock.ColumnType{
-		{Name: "timestamp", Type: "UInt64"},
-		{Name: "name", Type: "String"},
-	}
-	testName := "name"
-
-	options := clickhouseReader.NewOptions("", 0, 0, 0, "", "archiveNamespace")
-
-	// iterate over test data, create reader and run test
-	for _, tc := range testCases {
-		t.Run(tc.name, func(t *testing.T) {
-			// Setup mock
-			mock, err := cmock.NewClickHouseWithQueryMatcher(nil, &regexMatcher{})
-			require.NoError(t, err, "Failed to create ClickHouse mock")
-
-			// Configure mock responses
-			for _, response := range tc.queryResponses {
-				values := make([][]any, 0, len(response.timestamps))
-				for _, ts := range response.timestamps {
-					values = append(values, []any{&ts, &testName})
-				}
-				// if len(values) > 0 {
-				mock.ExpectQuery(response.expectedQuery).WillReturnRows(
-					cmock.NewRows(cols, values),
-				)
-				// }
-			}
-
-			// Create reader and querier
-			reader := clickhouseReader.NewReaderFromClickhouseConnection(
-				mock,
-				options,
-				nil,
-				"",
-				featureManager.StartManager(),
-				"",
-				true,
-			)
-
-			q := &querier{
-				reader: reader,
-				builder: queryBuilder.NewQueryBuilder(
-					queryBuilder.QueryBuilderOptions{
-						BuildTraceQuery: tracesV3.PrepareTracesQuery,
-					},
-					featureManager.StartManager(),
-				),
-			}
-			// Update query parameters
-			params.Start = tc.queryParams.start
-			params.End = tc.queryParams.end
-			params.CompositeQuery.BuilderQueries["A"].Limit = tc.queryParams.limit
-			params.CompositeQuery.BuilderQueries["A"].Offset = tc.queryParams.offset
-
-			// Execute query
-			results, errMap, err := q.runWindowBasedListQuery(context.Background(), params, tsRanges)
-
-			if tc.expectedError {
-				require.Error(t, err)
-				return
-			}
-
-			// Assertions
-			require.NoError(t, err, "Query execution failed")
-			require.Nil(t, errMap, "Unexpected error map in results")
-			require.Len(t, results, 1, "Expected exactly one result set")
-
-			result := results[0]
-			require.Equal(t, "A", result.QueryName, "Incorrect query name in results")
-			require.Len(t, result.List, len(tc.expectedTimestamps),
-				"Result count mismatch: got %d results, expected %d",
-				len(result.List), len(tc.expectedTimestamps))
-
-			for i, expected := range tc.expectedTimestamps {
-				require.Equal(t, expected, result.List[i].Timestamp.UnixNano(),
-					"Timestamp mismatch at index %d: got %d, expected %d",
-					i, result.List[i].Timestamp.UnixNano(), expected)
-			}
-
-			// Verify mock expectations
-			err = mock.ExpectationsWereMet()
-			require.NoError(t, err, "Mock expectations were not met")
-		})
-	}
-}
@@ -116,9 +116,7 @@ func expressionToQuery(
 	for _, tag := range qp.CompositeQuery.BuilderQueries[variable].GroupBy {
 		groupTags = append(groupTags, tag.Key)
 	}
-	if qp.CompositeQuery.PanelType != v3.PanelTypeTable {
-		groupTags = append(groupTags, "ts")
-	}
+	groupTags = append(groupTags, "ts")
 	if joinUsing == "" {
 		for _, tag := range groupTags {
 			joinUsing += fmt.Sprintf("%s.`%s` as `%s`, ", variable, tag, tag)
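With "ts" now always in groupTags, the join that expressionToQuery builds matches each group-by key plus the time bucket, so a formula like A.value + B.value only combines rows from the same bucket. A toy version of how such an ON list is assembled (illustrative code, not the repo's):

package main

import (
	"fmt"
	"strings"
)

// joinOn builds an ON clause that matches two sub-queries on every
// group-by tag plus the time bucket, mirroring the A.`ts` = B.`ts`
// condition added to the expected queries in the test hunks below.
func joinOn(left, right string, groupTags []string) string {
	conds := make([]string, 0, len(groupTags))
	for _, tag := range groupTags {
		conds = append(conds, fmt.Sprintf("%s.`%s` = %s.`%s`", left, tag, right, tag))
	}
	return strings.Join(conds, " AND ")
}

func main() {
	fmt.Println(joinOn("A", "B", []string{"key1.1", "ts"}))
	// A.`key1.1` = B.`key1.1` AND A.`ts` = B.`ts`
}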
@@ -498,11 +498,11 @@ var testLogsWithFormula = []struct {
 			},
 		},
 	},
-	ExpectedQuery: "SELECT A.`key1.1` as `key1.1`, A.value + B.value as value FROM (SELECT now() as ts, attributes_bool_value[indexOf(attributes_bool_key, 'key1.1')] as `key1.1`, " +
+	ExpectedQuery: "SELECT A.`key1.1` as `key1.1`, A.`ts` as `ts`, A.value + B.value as value FROM (SELECT now() as ts, attributes_bool_value[indexOf(attributes_bool_key, 'key1.1')] as `key1.1`, " +
 		"toFloat64(count(*)) as value from signoz_logs.distributed_logs where (timestamp >= 1702979056000000000 AND timestamp <= 1702982656000000000) AND attributes_bool_value[indexOf(attributes_bool_key, 'key1.1')] = true AND " +
 		"has(attributes_bool_key, 'key1.1') group by `key1.1` order by value DESC) as A INNER JOIN (SELECT now() as ts, attributes_bool_value[indexOf(attributes_bool_key, 'key1.1')] as `key1.1`, " +
 		"toFloat64(count(*)) as value from signoz_logs.distributed_logs where (timestamp >= 1702979056000000000 AND timestamp <= 1702982656000000000) AND attributes_bool_value[indexOf(attributes_bool_key, 'key1.2')] = true AND " +
-		"has(attributes_bool_key, 'key1.1') group by `key1.1` order by value DESC) as B ON A.`key1.1` = B.`key1.1`",
+		"has(attributes_bool_key, 'key1.1') group by `key1.1` order by value DESC) as B ON A.`key1.1` = B.`key1.1` AND A.`ts` = B.`ts`",
 },
 {
 	Name: "test formula with dot in filter and group by materialized attribute",
@@ -707,12 +707,12 @@ var testLogsWithFormulaV2 = []struct {
 			},
 		},
 	},
-	ExpectedQuery: "SELECT A.`key1.1` as `key1.1`, A.value + B.value as value FROM (SELECT attributes_bool['key1.1'] as `key1.1`, " +
+	ExpectedQuery: "SELECT A.`key1.1` as `key1.1`, A.`ts` as `ts`, A.value + B.value as value FROM (SELECT attributes_bool['key1.1'] as `key1.1`, " +
 		"toFloat64(count(*)) as value from signoz_logs.distributed_logs_v2 where (timestamp >= 1702979056000000000 AND timestamp <= 1702982656000000000) AND (ts_bucket_start >= 1702977256 AND ts_bucket_start <= 1702982656) " +
 		"AND attributes_bool['key1.1'] = true AND mapContains(attributes_bool, 'key1.1') AND mapContains(attributes_bool, 'key1.1') group by `key1.1` order by value DESC) as A INNER JOIN (SELECT " +
 		"attributes_bool['key1.1'] as `key1.1`, toFloat64(count(*)) as value from signoz_logs.distributed_logs_v2 where (timestamp >= 1702979056000000000 AND timestamp <= 1702982656000000000) " +
 		"AND (ts_bucket_start >= 1702977256 AND ts_bucket_start <= 1702982656) AND attributes_bool['key1.2'] = true AND mapContains(attributes_bool, 'key1.2') AND " +
-		"mapContains(attributes_bool, 'key1.1') group by `key1.1` order by value DESC) as B ON A.`key1.1` = B.`key1.1`",
+		"mapContains(attributes_bool, 'key1.1') group by `key1.1` order by value DESC) as B ON A.`key1.1` = B.`key1.1` AND A.`ts` = B.`ts`",
 },
 {
 	Name: "test formula with dot in filter and group by materialized attribute",
@@ -384,11 +384,6 @@ func LogCommentEnricher(next http.Handler) http.Handler {
 		client = "api"
 	}

-	email, err := auth.GetEmailFromJwt(r.Context())
-	if err != nil {
-		zap.S().Errorf("error while getting email from jwt: %v", err)
-	}
-
 	kvs := map[string]string{
 		"path":        path,
 		"dashboardID": dashboardID,
@@ -397,7 +392,6 @@ func LogCommentEnricher(next http.Handler) http.Handler {
 		"client":      client,
 		"viewName":    viewName,
 		"servicesTab": tab,
-		"email":       email,
 	}

 	r = r.WithContext(context.WithValue(r.Context(), common.LogCommentKey, kvs))
@@ -10,7 +10,7 @@ import (
 	"go.signoz.io/signoz/pkg/query-service/utils"
 )

-var AggregateOperatorToPercentile = map[v3.AggregateOperator]float64{
+var aggregateOperatorToPercentile = map[v3.AggregateOperator]float64{
 	v3.AggregateOperatorP05: 0.05,
 	v3.AggregateOperatorP10: 0.10,
 	v3.AggregateOperatorP20: 0.20,
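This hunk and the ones that follow are a mechanical Go visibility change: the helpers lose their capital letter, which makes them package-private; nothing else about them changes. Visibility in Go is controlled purely by the identifier's first letter, as in this minimal illustration (hypothetical package and names):

package querybuilder

// Exported: visible to importers as querybuilder.DefaultLimit.
const DefaultLimit = 100

// unexported: visible only inside package querybuilder.
const defaultOffset = 0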
@@ -22,7 +22,7 @@ var AggregateOperatorToPercentile = map[v3.AggregateOperator]float64{
 	v3.AggregateOperatorP99: 0.99,
 }

-var AggregateOperatorToSQLFunc = map[v3.AggregateOperator]string{
+var aggregateOperatorToSQLFunc = map[v3.AggregateOperator]string{
 	v3.AggregateOperatorAvg: "avg",
 	v3.AggregateOperatorMax: "max",
 	v3.AggregateOperatorMin: "min",
@@ -109,7 +109,7 @@ func getSelectLabels(aggregatorOperator v3.AggregateOperator, groupBy []v3.Attri
 	return selectLabels
 }

-func GetSelectKeys(aggregatorOperator v3.AggregateOperator, groupBy []v3.AttributeKey) string {
+func getSelectKeys(aggregatorOperator v3.AggregateOperator, groupBy []v3.AttributeKey) string {
 	var selectLabels []string
 	if aggregatorOperator == v3.AggregateOperatorNoOp {
 		return ""
@@ -173,7 +173,7 @@ func buildTracesFilterQuery(fs *v3.FilterSet) (string, error) {
 			conditions = append(conditions, fmt.Sprintf(operator, columnName, fmtVal))
 		case v3.FilterOperatorExists, v3.FilterOperatorNotExists:
 			if item.Key.IsColumn {
-				subQuery, err := ExistsSubQueryForFixedColumn(item.Key, item.Operator)
+				subQuery, err := existsSubQueryForFixedColumn(item.Key, item.Operator)
 				if err != nil {
 					return "", err
 				}
@@ -199,7 +199,7 @@ func buildTracesFilterQuery(fs *v3.FilterSet) (string, error) {
 	return queryString, nil
 }

-func ExistsSubQueryForFixedColumn(key v3.AttributeKey, op v3.FilterOperator) (string, error) {
+func existsSubQueryForFixedColumn(key v3.AttributeKey, op v3.FilterOperator) (string, error) {
 	if key.DataType == v3.AttributeKeyDataTypeString {
 		if op == v3.FilterOperatorExists {
 			return fmt.Sprintf("%s %s ''", key.Key, tracesOperatorMappingV3[v3.FilterOperatorNotEqual]), nil
@@ -244,7 +244,7 @@ func buildTracesQuery(start, end, step int64, mq *v3.BuilderQuery, _ string, pan

 	selectLabels := getSelectLabels(mq.AggregateOperator, mq.GroupBy)

-	having := Having(mq.Having)
+	having := having(mq.Having)
 	if having != "" {
 		having = " having " + having
 	}
@@ -272,7 +272,7 @@ func buildTracesQuery(start, end, step int64, mq *v3.BuilderQuery, _ string, pan

 	// we don't need value for first query
 	if options.GraphLimitQtype == constants.FirstQueryGraphLimit {
-		queryTmpl = "SELECT " + GetSelectKeys(mq.AggregateOperator, mq.GroupBy) + " from (" + queryTmpl + ")"
+		queryTmpl = "SELECT " + getSelectKeys(mq.AggregateOperator, mq.GroupBy) + " from (" + queryTmpl + ")"
 	}

 	emptyValuesInGroupByFilter, err := handleEmptyValuesInGroupBy(mq.GroupBy)
@@ -281,7 +281,7 @@ func buildTracesQuery(start, end, step int64, mq *v3.BuilderQuery, _ string, pan
 	}
 	filterSubQuery += emptyValuesInGroupByFilter

-	groupBy := GroupByAttributeKeyTags(panelType, options.GraphLimitQtype, mq.GroupBy...)
+	groupBy := groupByAttributeKeyTags(panelType, options.GraphLimitQtype, mq.GroupBy...)
 	if groupBy != "" {
 		groupBy = " group by " + groupBy
 	}
@@ -291,7 +291,7 @@ func buildTracesQuery(start, end, step int64, mq *v3.BuilderQuery, _ string, pan
 	}

 	if options.GraphLimitQtype == constants.SecondQueryGraphLimit {
-		filterSubQuery = filterSubQuery + " AND " + fmt.Sprintf("(%s) GLOBAL IN (", GetSelectKeys(mq.AggregateOperator, mq.GroupBy)) + "%s)"
+		filterSubQuery = filterSubQuery + " AND " + fmt.Sprintf("(%s) GLOBAL IN (", getSelectKeys(mq.AggregateOperator, mq.GroupBy)) + "%s)"
 	}

 	aggregationKey := ""
@@ -311,7 +311,7 @@ func buildTracesQuery(start, end, step int64, mq *v3.BuilderQuery, _ string, pan
 		rate = rate / 60.0
 	}

-	op := fmt.Sprintf("%s(%s)/%f", AggregateOperatorToSQLFunc[mq.AggregateOperator], aggregationKey, rate)
+	op := fmt.Sprintf("%s(%s)/%f", aggregateOperatorToSQLFunc[mq.AggregateOperator], aggregationKey, rate)
 	query := fmt.Sprintf(queryTmpl, op, filterSubQuery, groupBy, having, orderBy)
 	return query, nil
 case
@@ -324,17 +324,17 @@ func buildTracesQuery(start, end, step int64, mq *v3.BuilderQuery, _ string, pan
 	v3.AggregateOperatorP90,
 	v3.AggregateOperatorP95,
 	v3.AggregateOperatorP99:
-	op := fmt.Sprintf("quantile(%v)(%s)", AggregateOperatorToPercentile[mq.AggregateOperator], aggregationKey)
+	op := fmt.Sprintf("quantile(%v)(%s)", aggregateOperatorToPercentile[mq.AggregateOperator], aggregationKey)
 	query := fmt.Sprintf(queryTmpl, op, filterSubQuery, groupBy, having, orderBy)
 	return query, nil
 case v3.AggregateOperatorAvg, v3.AggregateOperatorSum, v3.AggregateOperatorMin, v3.AggregateOperatorMax:
-	op := fmt.Sprintf("%s(%s)", AggregateOperatorToSQLFunc[mq.AggregateOperator], aggregationKey)
+	op := fmt.Sprintf("%s(%s)", aggregateOperatorToSQLFunc[mq.AggregateOperator], aggregationKey)
 	query := fmt.Sprintf(queryTmpl, op, filterSubQuery, groupBy, having, orderBy)
 	return query, nil
 case v3.AggregateOperatorCount:
 	if mq.AggregateAttribute.Key != "" {
 		if mq.AggregateAttribute.IsColumn {
-			subQuery, err := ExistsSubQueryForFixedColumn(mq.AggregateAttribute, v3.FilterOperatorExists)
+			subQuery, err := existsSubQueryForFixedColumn(mq.AggregateAttribute, v3.FilterOperatorExists)
 			if err == nil {
 				filterSubQuery = fmt.Sprintf("%s AND %s", filterSubQuery, subQuery)
 			}
@@ -354,9 +354,9 @@ func buildTracesQuery(start, end, step int64, mq *v3.BuilderQuery, _ string, pan
 	var query string
 	if panelType == v3.PanelTypeTrace {
 		withSubQuery := fmt.Sprintf(constants.TracesExplorerViewSQLSelectWithSubQuery, constants.SIGNOZ_TRACE_DBNAME, constants.SIGNOZ_SPAN_INDEX_LOCAL_TABLENAME, spanIndexTableTimeFilter, filterSubQuery)
-		withSubQuery = AddLimitToQuery(withSubQuery, mq.Limit)
+		withSubQuery = addLimitToQuery(withSubQuery, mq.Limit)
 		if mq.Offset != 0 {
-			withSubQuery = AddOffsetToQuery(withSubQuery, mq.Offset)
+			withSubQuery = addOffsetToQuery(withSubQuery, mq.Offset)
 		}
 		// query = withSubQuery + ") " + fmt.Sprintf(constants.TracesExplorerViewSQLSelectQuery, constants.SIGNOZ_TRACE_DBNAME, constants.SIGNOZ_SPAN_INDEX_TABLENAME, constants.SIGNOZ_SPAN_INDEX_TABLENAME)
 		query = fmt.Sprintf(constants.TracesExplorerViewSQLSelectBeforeSubQuery, constants.SIGNOZ_TRACE_DBNAME, constants.SIGNOZ_SPAN_INDEX_TABLENAME) + withSubQuery + ") " + fmt.Sprintf(constants.TracesExplorerViewSQLSelectAfterSubQuery, constants.SIGNOZ_TRACE_DBNAME, constants.SIGNOZ_SPAN_INDEX_TABLENAME, spanIndexTableTimeFilter)
@@ -403,7 +403,7 @@ func groupBy(panelType v3.PanelType, graphLimitQtype string, tags ...string) str
 	return strings.Join(tags, ",")
 }

-func GroupByAttributeKeyTags(panelType v3.PanelType, graphLimitQtype string, tags ...v3.AttributeKey) string {
+func groupByAttributeKeyTags(panelType v3.PanelType, graphLimitQtype string, tags ...v3.AttributeKey) string {
 	groupTags := []string{}
 	for _, tag := range tags {
 		groupTags = append(groupTags, fmt.Sprintf("`%s`", tag.Key))
@@ -456,7 +456,7 @@ func orderByAttributeKeyTags(panelType v3.PanelType, items []v3.OrderBy, tags []
 	return str
 }

-func Having(items []v3.Having) string {
+func having(items []v3.Having) string {
 	// aggregate something and filter on that aggregate
 	var having []string
 	for _, item := range items {
@@ -465,7 +465,7 @@ func Having(items []v3.Having) string {
 	return strings.Join(having, " AND ")
 }

-func ReduceToQuery(query string, reduceTo v3.ReduceToOperator, _ v3.AggregateOperator) (string, error) {
+func reduceToQuery(query string, reduceTo v3.ReduceToOperator, _ v3.AggregateOperator) (string, error) {

 	var groupBy string
 	switch reduceTo {
@@ -485,14 +485,14 @@ func ReduceToQuery(query string, reduceTo v3.ReduceToOperator, _ v3.AggregateOpe
 	return query, nil
 }

-func AddLimitToQuery(query string, limit uint64) string {
+func addLimitToQuery(query string, limit uint64) string {
 	if limit == 0 {
 		limit = 100
 	}
 	return fmt.Sprintf("%s LIMIT %d", query, limit)
 }

-func AddOffsetToQuery(query string, offset uint64) string {
+func addOffsetToQuery(query string, offset uint64) string {
 	return fmt.Sprintf("%s OFFSET %d", query, offset)
 }

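The two helpers just shown append SQL clauses, defaulting LIMIT to 100 when the caller passes 0. A standalone illustration of the composition order (LIMIT before OFFSET, matching how PrepareTracesQuery chains them below; the main function and query text are illustrative):

package main

import "fmt"

func addLimitToQuery(query string, limit uint64) string {
	if limit == 0 {
		limit = 100 // default page size when the caller does not set one
	}
	return fmt.Sprintf("%s LIMIT %d", query, limit)
}

func addOffsetToQuery(query string, offset uint64) string {
	return fmt.Sprintf("%s OFFSET %d", query, offset)
}

func main() {
	q := "SELECT name FROM spans ORDER BY timestamp DESC"
	q = addLimitToQuery(q, 50)
	q = addOffsetToQuery(q, 100)
	fmt.Println(q) // ... ORDER BY timestamp DESC LIMIT 50 OFFSET 100
}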
@@ -513,7 +513,7 @@ func PrepareTracesQuery(start, end int64, panelType v3.PanelType, mq *v3.Builder
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
query = AddLimitToQuery(query, mq.Limit)
|
query = addLimitToQuery(query, mq.Limit)
|
||||||
|
|
||||||
return query, nil
|
return query, nil
|
||||||
} else if options.GraphLimitQtype == constants.SecondQueryGraphLimit {
|
} else if options.GraphLimitQtype == constants.SecondQueryGraphLimit {
|
||||||
@@ -529,13 +529,13 @@ func PrepareTracesQuery(start, end int64, panelType v3.PanelType, mq *v3.Builder
|
|||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
if panelType == v3.PanelTypeValue {
|
if panelType == v3.PanelTypeValue {
|
||||||
query, err = ReduceToQuery(query, mq.ReduceTo, mq.AggregateOperator)
|
query, err = reduceToQuery(query, mq.ReduceTo, mq.AggregateOperator)
|
||||||
}
|
}
|
||||||
if panelType == v3.PanelTypeList || panelType == v3.PanelTypeTable {
|
if panelType == v3.PanelTypeList || panelType == v3.PanelTypeTable {
|
||||||
query = AddLimitToQuery(query, mq.Limit)
|
query = addLimitToQuery(query, mq.Limit)
|
||||||
|
|
||||||
if mq.Offset != 0 {
|
if mq.Offset != 0 {
|
||||||
query = AddOffsetToQuery(query, mq.Offset)
|
query = addOffsetToQuery(query, mq.Offset)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return query, err
|
return query, err
|
||||||
|
|||||||
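Note on the hunks above: the v3 pagination and reduction helpers lose their exported names because the v4 package that consumed them is removed later in this diff. Their behavior is unchanged; a minimal, self-contained sketch of the limit/offset pair (functions copied from the hunk above, query text illustrative):

package main

import "fmt"

func addLimitToQuery(query string, limit uint64) string {
    if limit == 0 {
        limit = 100 // a zero limit falls back to 100 rows
    }
    return fmt.Sprintf("%s LIMIT %d", query, limit)
}

func addOffsetToQuery(query string, offset uint64) string {
    return fmt.Sprintf("%s OFFSET %d", query, offset)
}

func main() {
    q := "SELECT traceID FROM signoz_traces.spans" // illustrative query
    q = addLimitToQuery(q, 0)                      // defaults to LIMIT 100
    q = addOffsetToQuery(q, 50)
    fmt.Println(q) // ... LIMIT 100 OFFSET 50
}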
@@ -1,118 +0,0 @@
-package v4
-
-import (
-    "go.signoz.io/signoz/pkg/query-service/constants"
-    v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
-    "go.signoz.io/signoz/pkg/query-service/utils"
-)
-
-// if the field is timestamp/id/value we don't need to enrich
-// if the field is static we don't need to enrich
-// for all others we need to enrich
-// an attribute/resource can be materialized/dematerialized
-// but the query should work regardless and shouldn't fail
-func isEnriched(field v3.AttributeKey) bool {
-    // if it is timestamp/id dont check
-    if field.Key == "timestamp" || field.Key == constants.SigNozOrderByValue {
-        return true
-    }
-
-    // we need to check if the field is static and return false if isColumn is not set
-    if _, ok := constants.StaticFieldsTraces[field.Key]; ok && field.IsColumn {
-        return true
-    }
-
-    return false
-}
-
-func enrichKeyWithMetadata(key v3.AttributeKey, keys map[string]v3.AttributeKey) v3.AttributeKey {
-    if isEnriched(key) {
-        return key
-    }
-
-    if v, ok := constants.StaticFieldsTraces[key.Key]; ok {
-        return v
-    }
-
-    for _, key := range utils.GenerateEnrichmentKeys(key) {
-        if val, ok := keys[key]; ok {
-            return val
-        }
-    }
-
-    // enrich with default values if metadata is not found
-    if key.Type == "" {
-        key.Type = v3.AttributeKeyTypeTag
-    }
-    if key.DataType == "" {
-        key.DataType = v3.AttributeKeyDataTypeString
-    }
-    return key
-}
-
-func Enrich(params *v3.QueryRangeParamsV3, keys map[string]v3.AttributeKey) {
-    if params.CompositeQuery.QueryType != v3.QueryTypeBuilder {
-        return
-    }
-
-    for _, query := range params.CompositeQuery.BuilderQueries {
-        if query.DataSource == v3.DataSourceTraces {
-            EnrichTracesQuery(query, keys)
-        }
-    }
-}
-
-func EnrichTracesQuery(query *v3.BuilderQuery, keys map[string]v3.AttributeKey) {
-    // enrich aggregate attribute
-    query.AggregateAttribute = enrichKeyWithMetadata(query.AggregateAttribute, keys)
-
-    // enrich filter items
-    if query.Filters != nil && len(query.Filters.Items) > 0 {
-        for idx, filter := range query.Filters.Items {
-            query.Filters.Items[idx].Key = enrichKeyWithMetadata(filter.Key, keys)
-            // if the serviceName column is used, use the corresponding resource attribute as well during filtering
-            // since there is only one of these resource attributes we are adding it here directly.
-            // move it somewhere else if this list is big
-            if filter.Key.Key == "serviceName" {
-                query.Filters.Items[idx].Key = v3.AttributeKey{
-                    Key:      "service.name",
-                    DataType: v3.AttributeKeyDataTypeString,
-                    Type:     v3.AttributeKeyTypeResource,
-                    IsColumn: false,
-                }
-            }
-        }
-    }
-
-    // enrich group by
-    for idx, groupBy := range query.GroupBy {
-        query.GroupBy[idx] = enrichKeyWithMetadata(groupBy, keys)
-    }
-
-    // enrich order by
-    query.OrderBy = enrichOrderBy(query.OrderBy, keys)
-
-    // enrich select columns
-    for idx, selectColumn := range query.SelectColumns {
-        query.SelectColumns[idx] = enrichKeyWithMetadata(selectColumn, keys)
-    }
-
-}
-
-func enrichOrderBy(items []v3.OrderBy, keys map[string]v3.AttributeKey) []v3.OrderBy {
-    enrichedItems := []v3.OrderBy{}
-    for i := 0; i < len(items); i++ {
-        attributeKey := enrichKeyWithMetadata(v3.AttributeKey{
-            Key: items[i].ColumnName,
-        }, keys)
-        enrichedItems = append(enrichedItems, v3.OrderBy{
-            ColumnName: items[i].ColumnName,
-            Order:      items[i].Order,
-            Key:        attributeKey.Key,
-            DataType:   attributeKey.DataType,
-            Type:       attributeKey.Type,
-            IsColumn:   attributeKey.IsColumn,
-        })
-    }
-    return enrichedItems
-}
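The deleted enrichment code above resolves a bare key against a metadata map before falling back to tag/string defaults. A rough, self-contained sketch of that lookup, assuming the `key##type##datatype` map-key format that the deleted tests below use (the exact candidates produced by utils.GenerateEnrichmentKeys are not shown in this diff, so generateEnrichmentKeys here is a hypothetical stand-in):

package main

import "fmt"

type attributeKey struct {
    Key, Type, DataType string
    IsColumn            bool
}

// hypothetical stand-in for utils.GenerateEnrichmentKeys: try fully
// qualified candidates for a bare key (assumed format: key##type##datatype)
func generateEnrichmentKeys(key attributeKey) []string {
    return []string{
        key.Key + "##tag##string",
        key.Key + "##tag##int64",
        key.Key + "##resource##string",
    }
}

func enrichKeyWithMetadata(key attributeKey, keys map[string]attributeKey) attributeKey {
    for _, candidate := range generateEnrichmentKeys(key) {
        if val, ok := keys[candidate]; ok {
            return val
        }
    }
    // defaults when no metadata is found, mirroring the deleted code
    if key.Type == "" {
        key.Type = "tag"
    }
    if key.DataType == "" {
        key.DataType = "string"
    }
    return key
}

func main() {
    keys := map[string]attributeKey{
        "bytes##tag##int64": {Key: "bytes", Type: "tag", DataType: "int64"},
    }
    fmt.Printf("%+v\n", enrichKeyWithMetadata(attributeKey{Key: "bytes"}, keys))   // resolved to the int64 tag
    fmt.Printf("%+v\n", enrichKeyWithMetadata(attributeKey{Key: "unknown"}, keys)) // falls back to tag/string
}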
@@ -1,196 +0,0 @@
-package v4
-
-import (
-    "reflect"
-    "testing"
-
-    v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
-)
-
-func TestEnrichTracesQuery(t *testing.T) {
-    type args struct {
-        query *v3.BuilderQuery
-        keys  map[string]v3.AttributeKey
-        want  *v3.BuilderQuery
-    }
-    tests := []struct {
-        name string
-        args args
-    }{
-        {
-            name: "test 1",
-            args: args{
-                query: &v3.BuilderQuery{
-                    Filters: &v3.FilterSet{
-                        Operator: "AND",
-                        Items: []v3.FilterItem{
-                            {Key: v3.AttributeKey{Key: "bytes", Type: v3.AttributeKeyTypeTag}, Value: 100, Operator: ">"},
-                        },
-                    },
-                    OrderBy: []v3.OrderBy{},
-                },
-                keys: map[string]v3.AttributeKey{
-                    "bytes##tag##int64": {Key: "bytes", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag},
-                },
-                want: &v3.BuilderQuery{
-                    Filters: &v3.FilterSet{
-                        Operator: "AND",
-                        Items: []v3.FilterItem{
-                            {Key: v3.AttributeKey{Key: "bytes", Type: v3.AttributeKeyTypeTag, DataType: v3.AttributeKeyDataTypeInt64}, Value: 100, Operator: ">"},
-                        },
-                    },
-                    OrderBy: []v3.OrderBy{},
-                },
-            },
-        },
-        {
-            name: "test service name",
-            args: args{
-                query: &v3.BuilderQuery{
-                    Filters: &v3.FilterSet{
-                        Operator: "AND",
-                        Items: []v3.FilterItem{
-                            {Key: v3.AttributeKey{Key: "serviceName", DataType: v3.AttributeKeyDataTypeString, IsColumn: true}, Value: "myservice", Operator: "="},
-                            {Key: v3.AttributeKey{Key: "serviceName"}, Value: "myservice", Operator: "="},
-                        },
-                    },
-                    OrderBy: []v3.OrderBy{},
-                },
-                keys: map[string]v3.AttributeKey{},
-                want: &v3.BuilderQuery{
-                    Filters: &v3.FilterSet{
-                        Operator: "AND",
-                        Items: []v3.FilterItem{
-                            {Key: v3.AttributeKey{Key: "service.name", Type: v3.AttributeKeyTypeResource, DataType: v3.AttributeKeyDataTypeString}, Value: "myservice", Operator: "="},
-                            {Key: v3.AttributeKey{Key: "service.name", Type: v3.AttributeKeyTypeResource, DataType: v3.AttributeKeyDataTypeString}, Value: "myservice", Operator: "="},
-                        },
-                    },
-                    OrderBy: []v3.OrderBy{},
-                },
-            },
-        },
-        {
-            name: "test mat attrs",
-            args: args{
-                query: &v3.BuilderQuery{
-                    Filters: &v3.FilterSet{
-                        Operator: "AND",
-                        Items: []v3.FilterItem{
-                            {Key: v3.AttributeKey{Key: "http.route", DataType: v3.AttributeKeyDataTypeString, IsColumn: true}, Value: "/api", Operator: "="},
-                            {Key: v3.AttributeKey{Key: "msgSystem"}, Value: "name", Operator: "="},
-                            {Key: v3.AttributeKey{Key: "external_http_url"}, Value: "name", Operator: "="},
-                        },
-                    },
-                    OrderBy: []v3.OrderBy{},
-                },
-                keys: map[string]v3.AttributeKey{},
-                want: &v3.BuilderQuery{
-                    Filters: &v3.FilterSet{
-                        Operator: "AND",
-                        Items: []v3.FilterItem{
-                            {Key: v3.AttributeKey{Key: "http.route", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}, Value: "/api", Operator: "="},
-                            {Key: v3.AttributeKey{Key: "msgSystem", DataType: v3.AttributeKeyDataTypeString, IsColumn: true}, Value: "name", Operator: "="},
-                            {Key: v3.AttributeKey{Key: "external_http_url", DataType: v3.AttributeKeyDataTypeString, IsColumn: true}, Value: "name", Operator: "="},
-                        },
-                    },
-                    OrderBy: []v3.OrderBy{},
-                },
-            },
-        },
-        {
-            name: "test aggregateattr, filter, groupby, order by",
-            args: args{
-                query: &v3.BuilderQuery{
-                    AggregateOperator: v3.AggregateOperatorCount,
-                    AggregateAttribute: v3.AttributeKey{
-                        Key:      "http.route",
-                        DataType: v3.AttributeKeyDataTypeString,
-                        Type:     v3.AttributeKeyTypeTag,
-                    },
-                    Filters: &v3.FilterSet{
-                        Operator: "AND",
-                        Items: []v3.FilterItem{
-                            {Key: v3.AttributeKey{Key: "http.route", DataType: v3.AttributeKeyDataTypeString}, Value: "/api", Operator: "="},
-                        },
-                    },
-                    GroupBy: []v3.AttributeKey{
-                        {Key: "http.route", DataType: v3.AttributeKeyDataTypeString},
-                        {Key: "msgSystem", DataType: v3.AttributeKeyDataTypeString},
-                    },
-                    OrderBy: []v3.OrderBy{
-                        {ColumnName: "httpRoute", Order: v3.DirectionAsc},
-                    },
-                },
-                keys: map[string]v3.AttributeKey{
-                    "http.route##tag##string": {Key: "http.route", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true},
-                },
-                want: &v3.BuilderQuery{
-                    AggregateAttribute: v3.AttributeKey{
-                        Key:      "http.route",
-                        DataType: v3.AttributeKeyDataTypeString,
-                        Type:     v3.AttributeKeyTypeTag,
-                        IsColumn: true,
-                    },
-                    Filters: &v3.FilterSet{
-                        Operator: "AND",
-                        Items: []v3.FilterItem{
-                            {Key: v3.AttributeKey{Key: "http.route", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true}, Value: "/api", Operator: "="},
-                        },
-                    },
-                    GroupBy: []v3.AttributeKey{
-                        {Key: "http.route", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true},
-                        {Key: "msgSystem", DataType: v3.AttributeKeyDataTypeString, IsJSON: false, IsColumn: true},
-                    },
-                    OrderBy: []v3.OrderBy{
-                        {Key: "httpRoute", Order: v3.DirectionAsc, ColumnName: "httpRoute", DataType: v3.AttributeKeyDataTypeString, IsColumn: true},
-                    },
-                },
-            },
-        },
-        {
-            name: "enrich default values",
-            args: args{
-                query: &v3.BuilderQuery{
-                    Filters: &v3.FilterSet{
-                        Items: []v3.FilterItem{
-                            {Key: v3.AttributeKey{Key: "testattr"}},
-                        },
-                    },
-                    OrderBy: []v3.OrderBy{{ColumnName: "timestamp", Order: v3.DirectionAsc}},
-                },
-                keys: map[string]v3.AttributeKey{},
-                want: &v3.BuilderQuery{
-                    Filters: &v3.FilterSet{
-                        Items: []v3.FilterItem{{Key: v3.AttributeKey{Key: "testattr", Type: v3.AttributeKeyTypeTag, DataType: v3.AttributeKeyDataTypeString}}},
-                    },
-                    // isColumn won't matter in timestamp as it will always be a column
-                    OrderBy: []v3.OrderBy{{Key: "timestamp", Order: v3.DirectionAsc, ColumnName: "timestamp"}},
-                },
-            },
-        },
-    }
-    for _, tt := range tests {
-        t.Run(tt.name, func(t *testing.T) {
-            EnrichTracesQuery(tt.args.query, tt.args.keys)
-            // Check AggregateAttribute
-            if tt.args.query.AggregateAttribute.Key != "" && !reflect.DeepEqual(tt.args.query.AggregateAttribute, tt.args.want.AggregateAttribute) {
-                t.Errorf("EnrichTracesQuery() AggregateAttribute = %v, want %v", tt.args.query.AggregateAttribute, tt.args.want.AggregateAttribute)
-            }
-
-            // Check Filters
-            if tt.args.query.Filters != nil && !reflect.DeepEqual(tt.args.query.Filters, tt.args.want.Filters) {
-                t.Errorf("EnrichTracesQuery() Filters = %v, want %v", tt.args.query.Filters, tt.args.want.Filters)
-            }
-
-            // Check GroupBy
-            if tt.args.query.GroupBy != nil && !reflect.DeepEqual(tt.args.query.GroupBy, tt.args.want.GroupBy) {
-                t.Errorf("EnrichTracesQuery() GroupBy = %v, want %v", tt.args.query.GroupBy, tt.args.want.GroupBy)
-            }
-
-            // Check OrderBy
-            if tt.args.query.OrderBy != nil && !reflect.DeepEqual(tt.args.query.OrderBy, tt.args.want.OrderBy) {
-                t.Errorf("EnrichTracesQuery() OrderBy = %v, want %v", tt.args.query.OrderBy, tt.args.want.OrderBy)
-            }
-        })
-    }
-}
@@ -1,414 +0,0 @@
-package v4
-
-import (
-    "fmt"
-    "strings"
-
-    "go.signoz.io/signoz/pkg/query-service/app/resource"
-    tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
-    "go.signoz.io/signoz/pkg/query-service/constants"
-    v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
-    "go.signoz.io/signoz/pkg/query-service/utils"
-)
-
-const NANOSECOND = 1000000000
-
-var tracesOperatorMappingV3 = map[v3.FilterOperator]string{
-    v3.FilterOperatorIn:              "IN",
-    v3.FilterOperatorNotIn:           "NOT IN",
-    v3.FilterOperatorEqual:           "=",
-    v3.FilterOperatorNotEqual:        "!=",
-    v3.FilterOperatorLessThan:        "<",
-    v3.FilterOperatorLessThanOrEq:    "<=",
-    v3.FilterOperatorGreaterThan:     ">",
-    v3.FilterOperatorGreaterThanOrEq: ">=",
-    v3.FilterOperatorLike:            "ILIKE",
-    v3.FilterOperatorNotLike:         "NOT ILIKE",
-    v3.FilterOperatorRegex:           "match(%s, %s)",
-    v3.FilterOperatorNotRegex:        "NOT match(%s, %s)",
-    v3.FilterOperatorContains:        "ILIKE",
-    v3.FilterOperatorNotContains:     "NOT ILIKE",
-    v3.FilterOperatorExists:          "mapContains(%s, '%s')",
-    v3.FilterOperatorNotExists:       "NOT mapContains(%s, '%s')",
-}
-
-func getClickHouseTracesColumnType(columnType v3.AttributeKeyType) string {
-    if columnType == v3.AttributeKeyTypeResource {
-        return "resources"
-    }
-    return "attributes"
-}
-
-func getClickHouseTracesColumnDataType(columnDataType v3.AttributeKeyDataType) string {
-    if columnDataType == v3.AttributeKeyDataTypeFloat64 || columnDataType == v3.AttributeKeyDataTypeInt64 {
-        return "number"
-    }
-    if columnDataType == v3.AttributeKeyDataTypeBool {
-        return "bool"
-    }
-    return "string"
-}
-
-func getColumnName(key v3.AttributeKey) string {
-    // if key present in static return as it is
-    if _, ok := constants.StaticFieldsTraces[key.Key]; ok {
-        return key.Key
-    }
-
-    if !key.IsColumn {
-        keyType := getClickHouseTracesColumnType(key.Type)
-        keyDType := getClickHouseTracesColumnDataType(key.DataType)
-        return fmt.Sprintf("%s_%s['%s']", keyType, keyDType, key.Key)
-    }
-
-    return "`" + utils.GetClickhouseColumnNameV2(string(key.Type), string(key.DataType), key.Key) + "`"
-}
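The column-name resolution above is the heart of this deleted v4 builder: non-materialized keys are read out of the attributes/resources maps, materialized ones become backticked columns. A small runnable sketch of the map branch, with types and data types as plain strings for brevity:

package main

import "fmt"

// mirrors the non-column branch of getColumnName above
func mapColumnName(keyType, keyDataType, key string) string {
    group := "attributes"
    if keyType == "resource" {
        group = "resources"
    }
    dtype := "string"
    switch keyDataType {
    case "int64", "float64":
        dtype = "number"
    case "bool":
        dtype = "bool"
    }
    return fmt.Sprintf("%s_%s['%s']", group, dtype, key)
}

func main() {
    fmt.Println(mapColumnName("tag", "string", "http.method"))       // attributes_string['http.method']
    fmt.Println(mapColumnName("tag", "int64", "bytes"))              // attributes_number['bytes']
    fmt.Println(mapColumnName("resource", "string", "service.name")) // resources_string['service.name']
}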
-// getSelectLabels returns the select labels for the query based on groupBy and aggregateOperator
-func getSelectLabels(groupBy []v3.AttributeKey) string {
-    var labels []string
-    for _, tag := range groupBy {
-        name := getColumnName(tag)
-        labels = append(labels, fmt.Sprintf(" %s as `%s`", name, tag.Key))
-    }
-    return strings.Join(labels, ",")
-}
-
-func buildTracesFilterQuery(fs *v3.FilterSet) (string, error) {
-    var conditions []string
-
-    if fs != nil && len(fs.Items) != 0 {
-        for _, item := range fs.Items {
-
-            // skip if it's a resource attribute
-            if item.Key.Type == v3.AttributeKeyTypeResource {
-                continue
-            }
-
-            val := item.Value
-            // generate the key
-            columnName := getColumnName(item.Key)
-            var fmtVal string
-            item.Operator = v3.FilterOperator(strings.ToLower(strings.TrimSpace(string(item.Operator))))
-            if item.Operator != v3.FilterOperatorExists && item.Operator != v3.FilterOperatorNotExists {
-                var err error
-                val, err = utils.ValidateAndCastValue(val, item.Key.DataType)
-                if err != nil {
-                    return "", fmt.Errorf("invalid value for key %s: %v", item.Key.Key, err)
-                }
-            }
-            if val != nil {
-                fmtVal = utils.ClickHouseFormattedValue(val)
-            }
-            if operator, ok := tracesOperatorMappingV3[item.Operator]; ok {
-                switch item.Operator {
-                case v3.FilterOperatorContains, v3.FilterOperatorNotContains:
-                    // we also want to treat %, _ as literals for contains
-                    val := utils.QuoteEscapedStringForContains(fmt.Sprintf("%s", item.Value), false)
-                    conditions = append(conditions, fmt.Sprintf("%s %s '%%%s%%'", columnName, operator, val))
-                case v3.FilterOperatorRegex, v3.FilterOperatorNotRegex:
-                    conditions = append(conditions, fmt.Sprintf(operator, columnName, fmtVal))
-                case v3.FilterOperatorExists, v3.FilterOperatorNotExists:
-                    if item.Key.IsColumn {
-                        subQuery, err := tracesV3.ExistsSubQueryForFixedColumn(item.Key, item.Operator)
-                        if err != nil {
-                            return "", err
-                        }
-                        conditions = append(conditions, subQuery)
-                    } else {
-                        cType := getClickHouseTracesColumnType(item.Key.Type)
-                        cDataType := getClickHouseTracesColumnDataType(item.Key.DataType)
-                        col := fmt.Sprintf("%s_%s", cType, cDataType)
-                        conditions = append(conditions, fmt.Sprintf(operator, col, item.Key.Key))
-                    }
-
-                default:
-                    conditions = append(conditions, fmt.Sprintf("%s %s %s", columnName, operator, fmtVal))
-                }
-            } else {
-                return "", fmt.Errorf("unsupported operator %s", item.Operator)
-            }
-        }
-    }
-    queryString := strings.Join(conditions, " AND ")
-
-    return queryString, nil
-}
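To make buildTracesFilterQuery concrete: each filter item becomes one SQL condition via the operator mapping, with contains/regex/exists handled specially. Below is a hand-assembled trace of the conditions it would emit for three representative items (values illustrative; the wildcard escaping done by utils.QuoteEscapedStringForContains is elided):

package main

import (
    "fmt"
    "strings"
)

func main() {
    // conditions written out by hand from the rules above, not produced by the builder itself
    conditions := []string{
        "attributes_string['http.method'] = 'GET'", // FilterOperatorEqual on a tag/string map key
        "attributes_string['name'] ILIKE '%api%'",  // FilterOperatorContains wraps the value in wildcards
        "mapContains(attributes_number, 'bytes')",  // FilterOperatorExists on a non-column number key
    }
    fmt.Println(strings.Join(conditions, " AND "))
}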
-func handleEmptyValuesInGroupBy(groupBy []v3.AttributeKey) (string, error) {
-    // TODO(nitya): in future when we support user based mat column handle them
-    // skipping now as we don't support creating them
-    filterItems := []v3.FilterItem{}
-    if len(groupBy) != 0 {
-        for _, item := range groupBy {
-            if !item.IsColumn {
-                filterItems = append(filterItems, v3.FilterItem{
-                    Key:      item,
-                    Operator: v3.FilterOperatorExists,
-                })
-            }
-        }
-    }
-    if len(filterItems) != 0 {
-        filterSet := v3.FilterSet{
-            Operator: "AND",
-            Items:    filterItems,
-        }
-        return buildTracesFilterQuery(&filterSet)
-    }
-    return "", nil
-}
-
-// orderBy returns a string of comma separated tags for order by clause
-// if there are remaining items which are not present in tags they are also added
-// if the order is not specified, it defaults to ASC
-func orderBy(panelType v3.PanelType, items []v3.OrderBy, tagLookup map[string]struct{}) []string {
-    var orderBy []string
-
-    for _, item := range items {
-        if item.ColumnName == constants.SigNozOrderByValue {
-            orderBy = append(orderBy, fmt.Sprintf("value %s", item.Order))
-        } else if _, ok := tagLookup[item.ColumnName]; ok {
-            orderBy = append(orderBy, fmt.Sprintf("`%s` %s", item.ColumnName, item.Order))
-        } else if panelType == v3.PanelTypeList {
-            attr := v3.AttributeKey{Key: item.ColumnName, DataType: item.DataType, Type: item.Type, IsColumn: item.IsColumn}
-            name := getColumnName(attr)
-            orderBy = append(orderBy, fmt.Sprintf("%s %s", name, item.Order))
-        }
-    }
-
-    return orderBy
-}
-
-func orderByAttributeKeyTags(panelType v3.PanelType, items []v3.OrderBy, tags []v3.AttributeKey) string {
-    tagLookup := map[string]struct{}{}
-    for _, v := range tags {
-        tagLookup[v.Key] = struct{}{}
-    }
-
-    orderByArray := orderBy(panelType, items, tagLookup)
-
-    if len(orderByArray) == 0 {
-        if panelType == v3.PanelTypeList {
-            orderByArray = append(orderByArray, constants.TIMESTAMP+" DESC")
-        } else {
-            orderByArray = append(orderByArray, "value DESC")
-        }
-    }
-
-    str := strings.Join(orderByArray, ",")
-    return str
-}
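One detail worth making explicit from orderByAttributeKeyTags above: when no order-by item survives the lookup, list panels fall back to newest-first by timestamp while aggregate panels sort by the computed value. A compact sketch of that fallback (panel types as plain strings for brevity):

package main

import "fmt"

func defaultOrderBy(panelType string) string {
    // mirrors the fallback at the end of orderByAttributeKeyTags above
    if panelType == "list" {
        return "timestamp DESC"
    }
    return "value DESC"
}

func main() {
    fmt.Println(defaultOrderBy("list"))  // timestamp DESC
    fmt.Println(defaultOrderBy("graph")) // value DESC
}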
-func buildTracesQuery(start, end, step int64, mq *v3.BuilderQuery, panelType v3.PanelType, options v3.QBOptions) (string, error) {
-    tracesStart := utils.GetEpochNanoSecs(start)
-    tracesEnd := utils.GetEpochNanoSecs(end)
-
-    // -1800 this is added so that the bucket start considers all the fingerprints.
-    bucketStart := tracesStart/NANOSECOND - 1800
-    bucketEnd := tracesEnd / NANOSECOND
-
-    timeFilter := fmt.Sprintf("(timestamp >= '%d' AND timestamp <= '%d') AND (ts_bucket_start >= %d AND ts_bucket_start <= %d)", tracesStart, tracesEnd, bucketStart, bucketEnd)
-
-    filterSubQuery, err := buildTracesFilterQuery(mq.Filters)
-    if err != nil {
-        return "", err
-    }
-    if filterSubQuery != "" {
-        filterSubQuery = " AND " + filterSubQuery
-    }
-
-    emptyValuesInGroupByFilter, err := handleEmptyValuesInGroupBy(mq.GroupBy)
-    if err != nil {
-        return "", err
-    }
-    if emptyValuesInGroupByFilter != "" {
-        filterSubQuery = filterSubQuery + " AND " + emptyValuesInGroupByFilter
-    }
-
-    resourceSubQuery, err := resource.BuildResourceSubQuery("signoz_traces", "distributed_traces_v3_resource", bucketStart, bucketEnd, mq.Filters, mq.GroupBy, mq.AggregateAttribute, false)
-    if err != nil {
-        return "", err
-    }
-    // join both the filter clauses
-    if resourceSubQuery != "" {
-        filterSubQuery = filterSubQuery + " AND (resource_fingerprint GLOBAL IN " + resourceSubQuery + ")"
-    }
-
-    // timerange will be sent in epoch millisecond
-    selectLabels := getSelectLabels(mq.GroupBy)
-    if selectLabels != "" {
-        selectLabels = selectLabels + ","
-    }
-
-    orderBy := orderByAttributeKeyTags(panelType, mq.OrderBy, mq.GroupBy)
-    if orderBy != "" {
-        orderBy = " order by " + orderBy
-    }
-
-    if mq.AggregateOperator == v3.AggregateOperatorNoOp {
-        var query string
-        if panelType == v3.PanelTypeTrace {
-            withSubQuery := fmt.Sprintf(constants.TracesExplorerViewSQLSelectWithSubQuery, constants.SIGNOZ_TRACE_DBNAME, constants.SIGNOZ_SPAN_INDEX_V3_LOCAL_TABLENAME, timeFilter, filterSubQuery)
-            withSubQuery = tracesV3.AddLimitToQuery(withSubQuery, mq.Limit)
-            if mq.Offset != 0 {
-                withSubQuery = tracesV3.AddOffsetToQuery(withSubQuery, mq.Offset)
-            }
-            query = fmt.Sprintf(constants.TracesExplorerViewSQLSelectBeforeSubQuery, constants.SIGNOZ_TRACE_DBNAME, constants.SIGNOZ_SPAN_INDEX_V3) + withSubQuery + ") " + fmt.Sprintf(constants.TracesExplorerViewSQLSelectAfterSubQuery, constants.SIGNOZ_TRACE_DBNAME, constants.SIGNOZ_SPAN_INDEX_V3, timeFilter)
-        } else if panelType == v3.PanelTypeList {
-            if len(mq.SelectColumns) == 0 {
-                return "", fmt.Errorf("select columns cannot be empty for panelType %s", panelType)
-            }
-            // add it to the select labels
-            selectLabels = getSelectLabels(mq.SelectColumns)
-            queryNoOpTmpl := fmt.Sprintf("SELECT timestamp as timestamp_datetime, spanID, traceID,%s ", selectLabels) + "from " + constants.SIGNOZ_TRACE_DBNAME + "." + constants.SIGNOZ_SPAN_INDEX_V3 + " where %s %s" + "%s"
-            query = fmt.Sprintf(queryNoOpTmpl, timeFilter, filterSubQuery, orderBy)
-        } else {
-            return "", fmt.Errorf("unsupported aggregate operator %s for panelType %s", mq.AggregateOperator, panelType)
-        }
-        return query, nil
-        // ---- NOOP ends here ----
-    }
-
-    having := tracesV3.Having(mq.Having)
-    if having != "" {
-        having = " having " + having
-    }
-
-    groupBy := tracesV3.GroupByAttributeKeyTags(panelType, options.GraphLimitQtype, mq.GroupBy...)
-    if groupBy != "" {
-        groupBy = " group by " + groupBy
-    }
-
-    aggregationKey := ""
-    if mq.AggregateAttribute.Key != "" {
-        aggregationKey = getColumnName(mq.AggregateAttribute)
-    }
-
-    var queryTmpl string
-    if options.GraphLimitQtype == constants.FirstQueryGraphLimit {
-        queryTmpl = "SELECT"
-    } else if panelType == v3.PanelTypeTable {
-        queryTmpl =
-            "SELECT "
-    } else if panelType == v3.PanelTypeGraph || panelType == v3.PanelTypeValue {
-        // Select the aggregate value for interval
-        queryTmpl =
-            fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d SECOND) AS ts,", step)
-    }
-
-    queryTmpl = queryTmpl + selectLabels +
-        " %s as value " +
-        "from " + constants.SIGNOZ_TRACE_DBNAME + "." + constants.SIGNOZ_SPAN_INDEX_V3 +
-        " where " + timeFilter + "%s" +
-        "%s%s" +
-        "%s"
-
-    // we don't need value for first query
-    if options.GraphLimitQtype == constants.FirstQueryGraphLimit {
-        queryTmpl = "SELECT " + tracesV3.GetSelectKeys(mq.AggregateOperator, mq.GroupBy) + " from (" + queryTmpl + ")"
-    }
-
-    if options.GraphLimitQtype == constants.SecondQueryGraphLimit {
-        filterSubQuery = filterSubQuery + " AND " + fmt.Sprintf("(%s) GLOBAL IN (", tracesV3.GetSelectKeys(mq.AggregateOperator, mq.GroupBy)) + "%s)"
-    }
-
-    switch mq.AggregateOperator {
-    case v3.AggregateOperatorRateSum,
-        v3.AggregateOperatorRateMax,
-        v3.AggregateOperatorRateAvg,
-        v3.AggregateOperatorRateMin,
-        v3.AggregateOperatorRate:
-
-        rate := float64(step)
-        if options.PreferRPM {
-            rate = rate / 60.0
-        }
-
-        op := fmt.Sprintf("%s(%s)/%f", tracesV3.AggregateOperatorToSQLFunc[mq.AggregateOperator], aggregationKey, rate)
-        query := fmt.Sprintf(queryTmpl, op, filterSubQuery, groupBy, having, orderBy)
-        return query, nil
-    case
-        v3.AggregateOperatorP05,
-        v3.AggregateOperatorP10,
-        v3.AggregateOperatorP20,
-        v3.AggregateOperatorP25,
-        v3.AggregateOperatorP50,
-        v3.AggregateOperatorP75,
-        v3.AggregateOperatorP90,
-        v3.AggregateOperatorP95,
-        v3.AggregateOperatorP99:
-        op := fmt.Sprintf("quantile(%v)(%s)", tracesV3.AggregateOperatorToPercentile[mq.AggregateOperator], aggregationKey)
-        query := fmt.Sprintf(queryTmpl, op, filterSubQuery, groupBy, having, orderBy)
-        return query, nil
-    case v3.AggregateOperatorAvg, v3.AggregateOperatorSum, v3.AggregateOperatorMin, v3.AggregateOperatorMax:
-        op := fmt.Sprintf("%s(%s)", tracesV3.AggregateOperatorToSQLFunc[mq.AggregateOperator], aggregationKey)
-        query := fmt.Sprintf(queryTmpl, op, filterSubQuery, groupBy, having, orderBy)
-        return query, nil
-    case v3.AggregateOperatorCount:
-        if mq.AggregateAttribute.Key != "" {
-            if mq.AggregateAttribute.IsColumn {
-                subQuery, err := tracesV3.ExistsSubQueryForFixedColumn(mq.AggregateAttribute, v3.FilterOperatorExists)
-                if err == nil {
-                    filterSubQuery = fmt.Sprintf("%s AND %s", filterSubQuery, subQuery)
-                }
-            } else {
-                column := getColumnName(mq.AggregateAttribute)
-                filterSubQuery = fmt.Sprintf("%s AND has(%s, '%s')", filterSubQuery, column, mq.AggregateAttribute.Key)
-            }
-        }
-        op := "toFloat64(count())"
-        query := fmt.Sprintf(queryTmpl, op, filterSubQuery, groupBy, having, orderBy)
-        return query, nil
-    case v3.AggregateOperatorCountDistinct:
-        op := fmt.Sprintf("toFloat64(count(distinct(%s)))", aggregationKey)
-        query := fmt.Sprintf(queryTmpl, op, filterSubQuery, groupBy, having, orderBy)
-        return query, nil
-    default:
-        return "", fmt.Errorf("unsupported aggregate operator %s", mq.AggregateOperator)
-    }
-}
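A subtlety in the rate branch of buildTracesQuery above: the aggregate is divided by the step in seconds, or by step/60 when options.PreferRPM is set, with the divisor formatted via %f. A runnable sketch of the resulting operator strings:

package main

import "fmt"

func main() {
    step := int64(60)
    for _, preferRPM := range []bool{false, true} {
        rate := float64(step)
        if preferRPM {
            rate = rate / 60.0 // per-minute instead of per-second
        }
        fmt.Printf("sum(bytes)/%f\n", rate)
    }
    // prints: sum(bytes)/60.000000 then sum(bytes)/1.000000
}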
-// PrepareTracesQuery returns the query string for traces
-// start and end are in epoch millisecond
-// step is in seconds
-func PrepareTracesQuery(start, end int64, panelType v3.PanelType, mq *v3.BuilderQuery, options v3.QBOptions) (string, error) {
-    // adjust the start and end time to the step interval
-    if panelType == v3.PanelTypeGraph {
-        // adjust the start and end time to the step interval for graph panel types
-        start = start - (start % (mq.StepInterval * 1000))
-        end = end - (end % (mq.StepInterval * 1000))
-    }
-    if options.GraphLimitQtype == constants.FirstQueryGraphLimit {
-        // give me just the group by names
-        query, err := buildTracesQuery(start, end, mq.StepInterval, mq, panelType, options)
-        if err != nil {
-            return "", err
-        }
-        query = tracesV3.AddLimitToQuery(query, mq.Limit)
-
-        return query, nil
-    } else if options.GraphLimitQtype == constants.SecondQueryGraphLimit {
-        query, err := buildTracesQuery(start, end, mq.StepInterval, mq, panelType, options)
-        if err != nil {
-            return "", err
-        }
-        return query, nil
-    }
-
-    query, err := buildTracesQuery(start, end, mq.StepInterval, mq, panelType, options)
-    if err != nil {
-        return "", err
-    }
-    if panelType == v3.PanelTypeValue {
-        query, err = tracesV3.ReduceToQuery(query, mq.ReduceTo, mq.AggregateOperator)
-    }
-    if panelType == v3.PanelTypeList || panelType == v3.PanelTypeTable {
-        query = tracesV3.AddLimitToQuery(query, mq.Limit)
-
-        if mq.Offset != 0 {
-            query = tracesV3.AddOffsetToQuery(query, mq.Offset)
-        }
-    }
-    return query, err
-}
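For graph panels, PrepareTracesQuery snaps the millisecond window to the step interval so buckets align across queries. A worked example, assuming a 60 s step:

package main

import "fmt"

func main() {
    // graph panels snap the window to the step interval in ms, as in PrepareTracesQuery above
    var start, end, stepMs int64 = 1700000012345, 1700003640123, 60 * 1000
    start = start - (start % stepMs)
    end = end - (end % stepMs)
    fmt.Println(start, end) // 1699999980000 1700003640000
}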
Some files were not shown because too many files have changed in this diff.