Compare commits

..

10 Commits

Author SHA1 Message Date
Jatinderjit Singh
5b2a27728c remove redundant error in prepareQueryRangeV5 2026-03-26 15:45:56 +05:30
Jatinderjit Singh
ede6a79e21 handle opts in eval_common 2026-03-26 15:45:56 +05:30
Jatinderjit Singh
94291433a4 remove duplicate Rule.String implementations 2026-03-26 15:45:56 +05:30
Jatinderjit Singh
8c73632aa2 remove redundant fields from AnomalyRule 2026-03-26 15:45:56 +05:30
Jatinderjit Singh
fe85efa69d unify rules.Eval implementation 2026-03-26 15:45:56 +05:30
Jatinderjit Singh
1a79e2f2e3 normalize Eval implementations for comparison 2026-03-26 15:45:56 +05:30
primus-bot[bot]
5db0501c02 chore(release): bump to v0.117.1 (#10721)
Co-authored-by: primus-bot[bot] <171087277+primus-bot[bot]@users.noreply.github.com>
Co-authored-by: Priyanshu Shrivastava <priyanshu@signoz.io>
2026-03-26 10:01:46 +00:00
Tushar Vats
73da474563 fix: select column option in export button (#10709)
* fix: all option in trace export

* fix: remove the hack, user can select fields

* fix: hide column selection for trace export
2026-03-26 09:11:23 +00:00
Srikanth Chekuri
028c134ea9 chore: reject empty aggregations in payload regardless of disabled st… (#10720)
Some checks failed
build-staging / prepare (push) Has been cancelled
build-staging / js-build (push) Has been cancelled
build-staging / go-build (push) Has been cancelled
build-staging / staging (push) Has been cancelled
Release Drafter / update_release_draft (push) Has been cancelled
* chore: reject empty aggregations in payload regardless of disabled status

* chore: update tests

* chore: count -> count()
2026-03-26 05:14:21 +00:00
Ashwin Bhatkal
31b61a89fd fix: collapsed panels not expanding (#10716)
* fix: collapsed panels not expanding

* fix: breaking logs when ordering by timestamp and not filtering on id
2026-03-26 04:06:18 +00:00
34 changed files with 1282 additions and 2652 deletions

View File

@@ -190,7 +190,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.117.0
image: signoz/signoz:v0.117.1
ports:
- "8080:8080" # signoz port
# - "6060:6060" # pprof port

View File

@@ -117,7 +117,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.117.0
image: signoz/signoz:v0.117.1
ports:
- "8080:8080" # signoz port
volumes:

View File

@@ -181,7 +181,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.117.0}
image: signoz/signoz:${VERSION:-v0.117.1}
container_name: signoz
ports:
- "8080:8080" # signoz port

View File

@@ -109,7 +109,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.117.0}
image: signoz/signoz:${VERSION:-v0.117.1}
container_name: signoz
ports:
- "8080:8080" # signoz port

View File

@@ -7,29 +7,21 @@ import (
"log/slog"
"math"
"strings"
"sync"
"time"
"github.com/SigNoz/signoz/ee/query-service/anomaly"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/transition"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/query-service/utils/times"
"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
"github.com/SigNoz/signoz/pkg/units"
baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
querierV5 "github.com/SigNoz/signoz/pkg/querier"
@@ -45,16 +37,6 @@ const (
type AnomalyRule struct {
*baserules.BaseRule
mtx sync.Mutex
reader interfaces.Reader
// querierV2 is used for alerts created after the introduction of new metrics query builder
querierV2 interfaces.Querier
// querierV5 is used for alerts migrated after the introduction of new query builder
querierV5 querierV5.Querier
provider anomaly.Provider
providerV2 anomalyV2.Provider
@@ -103,14 +85,6 @@ func NewAnomalyRule(
logger.Info("using seasonality", "seasonality", t.seasonality.String())
querierOptsV2 := querierV2.QuerierOptions{
Reader: reader,
Cache: cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
}
t.querierV2 = querierV2.NewQuerier(querierOptsV2)
t.reader = reader
if t.seasonality == anomaly.SeasonalityHourly {
t.provider = anomaly.NewHourlyProvider(
anomaly.WithCache[*anomaly.HourlyProvider](cache),
@@ -148,7 +122,6 @@ func NewAnomalyRule(
)
}
t.querierV5 = querierV5
t.version = p.Version
t.logger = logger
return &t, nil
@@ -333,14 +306,8 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,
}
func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
prevState := r.State()
valueFormatter := units.FormatterFromUnit(r.Unit())
var res ruletypes.Vector
var err error
if r.version == "v5" {
r.logger.InfoContext(ctx, "running v5 query")
res, err = r.buildAndRunQueryV5(ctx, r.OrgID(), ts)
@@ -352,220 +319,8 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
return 0, err
}
r.mtx.Lock()
defer r.mtx.Unlock()
resultFPs := map[uint64]struct{}{}
var alerts = make(map[uint64]*ruletypes.Alert, len(res))
ruleReceivers := r.Threshold.GetRuleReceivers()
ruleReceiverMap := make(map[string][]string)
for _, value := range ruleReceivers {
ruleReceiverMap[value.Name] = value.Channels
opts := baserules.EvalVectorOptions{
DeleteLabels: []string{labels.MetricNameLabel, labels.TemporalityLabel},
}
for _, smpl := range res {
l := make(map[string]string, len(smpl.Metric))
for _, lbl := range smpl.Metric {
l[lbl.Name] = lbl.Value
}
value := valueFormatter.Format(smpl.V, r.Unit())
threshold := valueFormatter.Format(smpl.Target, smpl.TargetUnit)
r.logger.DebugContext(ctx, "Alert template data for rule", "rule_name", r.Name(), "formatter", valueFormatter.Name(), "value", value, "threshold", threshold)
tmplData := ruletypes.AlertTemplateData(l, value, threshold)
// Inject some convenience variables that are easier to remember for users
// who are not used to Go's templating system.
defs := "{{$labels := .Labels}}{{$value := .Value}}{{$threshold := .Threshold}}"
// utility function to apply go template on labels and annotations
expand := func(text string) string {
tmpl := ruletypes.NewTemplateExpander(
ctx,
defs+text,
"__alert_"+r.Name(),
tmplData,
times.Time(timestamp.FromTime(ts)),
nil,
)
result, err := tmpl.Expand()
if err != nil {
result = fmt.Sprintf("<error expanding template: %s>", err)
r.logger.ErrorContext(ctx, "Expanding alert template failed", errors.Attr(err), "data", tmplData, "rule_name", r.Name())
}
return result
}
lb := labels.NewBuilder(smpl.Metric).Del(labels.MetricNameLabel).Del(labels.TemporalityLabel)
resultLabels := labels.NewBuilder(smpl.Metric).Del(labels.MetricNameLabel).Del(labels.TemporalityLabel).Labels()
for name, value := range r.Labels().Map() {
lb.Set(name, expand(value))
}
lb.Set(labels.AlertNameLabel, r.Name())
lb.Set(labels.AlertRuleIdLabel, r.ID())
lb.Set(labels.RuleSourceLabel, r.GeneratorURL())
annotations := make(labels.Labels, 0, len(r.Annotations().Map()))
for name, value := range r.Annotations().Map() {
annotations = append(annotations, labels.Label{Name: name, Value: expand(value)})
}
if smpl.IsMissing {
lb.Set(labels.AlertNameLabel, "[No data] "+r.Name())
lb.Set(labels.NoDataLabel, "true")
}
lbs := lb.Labels()
h := lbs.Hash()
resultFPs[h] = struct{}{}
if _, ok := alerts[h]; ok {
r.logger.ErrorContext(ctx, "the alert query returns duplicate records", "rule_id", r.ID(), "alert", alerts[h])
err = fmt.Errorf("duplicate alert found, vector contains metrics with the same labelset after applying alert labels")
return 0, err
}
alerts[h] = &ruletypes.Alert{
Labels: lbs,
QueryResultLables: resultLabels,
Annotations: annotations,
ActiveAt: ts,
State: model.StatePending,
Value: smpl.V,
GeneratorURL: r.GeneratorURL(),
Receivers: ruleReceiverMap[lbs.Map()[ruletypes.LabelThresholdName]],
Missing: smpl.IsMissing,
IsRecovering: smpl.IsRecovering,
}
}
r.logger.InfoContext(ctx, "number of alerts found", "rule_name", r.Name(), "alerts_count", len(alerts))
// alerts[h] is ready, add or update active list now
for h, a := range alerts {
// Check whether we already have alerting state for the identifying label set.
// Update the last value and annotations if so, create a new alert entry otherwise.
if alert, ok := r.Active[h]; ok && alert.State != model.StateInactive {
alert.Value = a.Value
alert.Annotations = a.Annotations
// Update the recovering and missing state of existing alert
alert.IsRecovering = a.IsRecovering
alert.Missing = a.Missing
if v, ok := alert.Labels.Map()[ruletypes.LabelThresholdName]; ok {
alert.Receivers = ruleReceiverMap[v]
}
continue
}
r.Active[h] = a
}
itemsToAdd := []model.RuleStateHistory{}
// Check if any pending alerts should be removed or fire now. Write out alert timeseries.
for fp, a := range r.Active {
labelsJSON, err := json.Marshal(a.QueryResultLables)
if err != nil {
r.logger.ErrorContext(ctx, "error marshaling labels", errors.Attr(err), "labels", a.Labels)
}
if _, ok := resultFPs[fp]; !ok {
// If the alert was previously firing, keep it around for a given
// retention time so it is reported as resolved to the AlertManager.
if a.State == model.StatePending || (!a.ResolvedAt.IsZero() && ts.Sub(a.ResolvedAt) > ruletypes.ResolvedRetention) {
delete(r.Active, fp)
}
if a.State != model.StateInactive {
a.State = model.StateInactive
a.ResolvedAt = ts
itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
RuleID: r.ID(),
RuleName: r.Name(),
State: model.StateInactive,
StateChanged: true,
UnixMilli: ts.UnixMilli(),
Labels: model.LabelsString(labelsJSON),
Fingerprint: a.QueryResultLables.Hash(),
Value: a.Value,
})
}
continue
}
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration().Duration() {
a.State = model.StateFiring
a.FiredAt = ts
state := model.StateFiring
if a.Missing {
state = model.StateNoData
}
itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
RuleID: r.ID(),
RuleName: r.Name(),
State: state,
StateChanged: true,
UnixMilli: ts.UnixMilli(),
Labels: model.LabelsString(labelsJSON),
Fingerprint: a.QueryResultLables.Hash(),
Value: a.Value,
})
}
// We need to change firing alert to recovering if the returned sample meets recovery threshold
changeFiringToRecovering := a.State == model.StateFiring && a.IsRecovering
// We need to change recovering alerts to firing if the returned sample meets target threshold
changeRecoveringToFiring := a.State == model.StateRecovering && !a.IsRecovering && !a.Missing
// in any of the above case we need to update the status of alert
if changeFiringToRecovering || changeRecoveringToFiring {
state := model.StateRecovering
if changeRecoveringToFiring {
state = model.StateFiring
}
a.State = state
r.logger.DebugContext(ctx, "converting alert state", "name", r.Name(), "state", state)
itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
RuleID: r.ID(),
RuleName: r.Name(),
State: state,
StateChanged: true,
UnixMilli: ts.UnixMilli(),
Labels: model.LabelsString(labelsJSON),
Fingerprint: a.QueryResultLables.Hash(),
Value: a.Value,
})
}
}
currentState := r.State()
overallStateChanged := currentState != prevState
for idx, item := range itemsToAdd {
item.OverallStateChanged = overallStateChanged
item.OverallState = currentState
itemsToAdd[idx] = item
}
r.RecordRuleStateHistory(ctx, prevState, currentState, itemsToAdd)
return len(r.Active), nil
}
func (r *AnomalyRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.Name(),
RuleCondition: r.Condition(),
EvalWindow: r.EvalWindow(),
Labels: r.Labels().Map(),
Annotations: r.Annotations().Map(),
PreferredChannels: r.PreferredChannels(),
}
byt, err := json.Marshal(ar)
if err != nil {
return fmt.Sprintf("error marshaling alerting rule: %s", err.Error())
}
return string(byt)
return r.EvalVector(ctx, ts, res, opts)
}

View File

@@ -116,7 +116,12 @@ describe.each([
expect(screen.getByRole('dialog')).toBeInTheDocument();
expect(screen.getByText('FORMAT')).toBeInTheDocument();
expect(screen.getByText('Number of Rows')).toBeInTheDocument();
expect(screen.getByText('Columns')).toBeInTheDocument();
if (dataSource === DataSource.TRACES) {
expect(screen.queryByText('Columns')).not.toBeInTheDocument();
} else {
expect(screen.getByText('Columns')).toBeInTheDocument();
}
});
it('allows changing export format', () => {
@@ -146,6 +151,17 @@ describe.each([
});
it('allows changing columns scope', () => {
if (dataSource === DataSource.TRACES) {
renderWithStore(dataSource);
fireEvent.click(screen.getByTestId(testId));
expect(screen.queryByRole('radio', { name: 'All' })).not.toBeInTheDocument();
expect(
screen.queryByRole('radio', { name: 'Selected' }),
).not.toBeInTheDocument();
return;
}
renderWithStore(dataSource);
fireEvent.click(screen.getByTestId(testId));
@@ -210,7 +226,12 @@ describe.each([
mockUseQueryBuilder.mockReturnValue({ stagedQuery: mockQuery });
renderWithStore(dataSource);
fireEvent.click(screen.getByTestId(testId));
fireEvent.click(screen.getByRole('radio', { name: 'Selected' }));
// For traces, column scope is always Selected and the radio is hidden
if (dataSource !== DataSource.TRACES) {
fireEvent.click(screen.getByRole('radio', { name: 'Selected' }));
}
fireEvent.click(screen.getByText('Export'));
await waitFor(() => {
@@ -227,6 +248,11 @@ describe.each([
});
it('sends no selectFields when column scope is All', async () => {
// For traces, column scope is always Selected — this test only applies to other sources
if (dataSource === DataSource.TRACES) {
return;
}
renderWithStore(dataSource);
fireEvent.click(screen.getByTestId(testId));
fireEvent.click(screen.getByRole('radio', { name: 'All' }));

View File

@@ -1,5 +1,6 @@
import { useCallback, useMemo, useState } from 'react';
import { Button, Popover, Radio, Tooltip, Typography } from 'antd';
import { TelemetryFieldKey } from 'api/v5/v5';
import { useExportRawData } from 'hooks/useDownloadOptionsMenu/useDownloadOptionsMenu';
import { Download, DownloadIcon, Loader2 } from 'lucide-react';
import { DataSource } from 'types/common/queryBuilder';
@@ -14,10 +15,12 @@ import './DownloadOptionsMenu.styles.scss';
interface DownloadOptionsMenuProps {
dataSource: DataSource;
selectedColumns?: TelemetryFieldKey[];
}
export default function DownloadOptionsMenu({
dataSource,
selectedColumns,
}: DownloadOptionsMenuProps): JSX.Element {
const [exportFormat, setExportFormat] = useState<string>(DownloadFormats.CSV);
const [rowLimit, setRowLimit] = useState<number>(DownloadRowCounts.TEN_K);
@@ -35,9 +38,19 @@ export default function DownloadOptionsMenu({
await handleExportRawData({
format: exportFormat,
rowLimit,
clearSelectColumns: columnsScope === DownloadColumnsScopes.ALL,
clearSelectColumns:
dataSource !== DataSource.TRACES &&
columnsScope === DownloadColumnsScopes.ALL,
selectedColumns,
});
}, [exportFormat, rowLimit, columnsScope, handleExportRawData]);
}, [
exportFormat,
rowLimit,
columnsScope,
selectedColumns,
handleExportRawData,
dataSource,
]);
const popoverContent = useMemo(
() => (
@@ -72,18 +85,22 @@ export default function DownloadOptionsMenu({
</Radio.Group>
</div>
<div className="horizontal-line" />
{dataSource !== DataSource.TRACES && (
<>
<div className="horizontal-line" />
<div className="columns-scope">
<Typography.Text className="title">Columns</Typography.Text>
<Radio.Group
value={columnsScope}
onChange={(e): void => setColumnsScope(e.target.value)}
>
<Radio value={DownloadColumnsScopes.ALL}>All</Radio>
<Radio value={DownloadColumnsScopes.SELECTED}>Selected</Radio>
</Radio.Group>
</div>
<div className="columns-scope">
<Typography.Text className="title">Columns</Typography.Text>
<Radio.Group
value={columnsScope}
onChange={(e): void => setColumnsScope(e.target.value)}
>
<Radio value={DownloadColumnsScopes.ALL}>All</Radio>
<Radio value={DownloadColumnsScopes.SELECTED}>Selected</Radio>
</Radio.Group>
</div>
</>
)}
<Button
type="primary"
@@ -97,7 +114,14 @@ export default function DownloadOptionsMenu({
</Button>
</div>
),
[exportFormat, rowLimit, columnsScope, isDownloading, handleExport],
[
exportFormat,
rowLimit,
columnsScope,
isDownloading,
handleExport,
dataSource,
],
);
return (

View File

@@ -39,7 +39,6 @@ import {
ScrollText,
X,
} from 'lucide-react';
import { parseAsString, useQueryState } from 'nuqs';
import { AppState } from 'store/reducers';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import {
@@ -53,18 +52,15 @@ import {
import { GlobalReducer } from 'types/reducer/globalTime';
import { v4 as uuidv4 } from 'uuid';
import { convertFiltersToExpression } from '../QueryBuilderV2/utils';
import { VIEW_TYPES, VIEWS } from './constants';
import Containers from './Containers/Containers';
import { HostDetailProps } from './HostMetricDetail.interfaces';
import { HOST_METRICS_LOGS_EXPR_QUERY_KEY } from './HostMetricsLogs/constants';
import HostMetricsLogs from './HostMetricsLogs/HostMetricsLogs';
import HostMetricLogsDetailedView from './HostMetricsLogs/HostMetricLogsDetailedView';
import HostMetricTraces from './HostMetricTraces/HostMetricTraces';
import Metrics from './Metrics/Metrics';
import Processes from './Processes/Processes';
import './HostMetricsDetail.styles.scss';
// eslint-disable-next-line sonarjs/cognitive-complexity
function HostMetricsDetails({
host,
@@ -133,6 +129,10 @@ function HostMetricsDetails({
};
}, [host?.hostName, searchParams]);
const [logFilters, setLogFilters] = useState<IBuilderQuery['filters']>(
initialFilters,
);
const [tracesFilters, setTracesFilters] = useState<IBuilderQuery['filters']>(
initialFilters,
);
@@ -147,6 +147,7 @@ function HostMetricsDetails({
}, [host]);
useEffect(() => {
setLogFilters(initialFilters);
setTracesFilters(initialFilters);
}, [initialFilters]);
@@ -171,6 +172,7 @@ function HostMetricsDetails({
setSearchParams({
...Object.fromEntries(searchParams.entries()),
[INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: e.target.value,
[INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS]: JSON.stringify(null),
[INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS]: JSON.stringify(null),
});
}
@@ -208,30 +210,48 @@ function HostMetricsDetails({
[],
);
const initialLogsExpression = useMemo(
() =>
convertFiltersToExpression({
items: [
{
id: uuidv4(),
key: {
key: 'host.name',
dataType: DataTypes.String,
type: 'resource',
id: 'host.name--string--resource--false',
},
op: '=',
value: host?.hostName || '',
},
],
op: 'AND',
}).expression,
[host?.hostName],
);
const handleChangeLogFilters = useCallback(
(value: IBuilderQuery['filters'], view: VIEWS) => {
setLogFilters((prevFilters) => {
const hostNameFilter = prevFilters?.items?.find(
(item) => item.key?.key === 'host.name',
);
const paginationFilter = value?.items?.find(
(item) => item.key?.key === 'id',
);
const newFilters = value?.items?.filter(
(item) => item.key?.key !== 'id' && item.key?.key !== 'host.name',
);
const [hostMetricLogsExpr] = useQueryState(
HOST_METRICS_LOGS_EXPR_QUERY_KEY,
parseAsString,
if (newFilters && newFilters?.length > 0) {
logEvent(InfraMonitoringEvents.FilterApplied, {
entity: InfraMonitoringEvents.HostEntity,
view: InfraMonitoringEvents.LogsView,
page: InfraMonitoringEvents.DetailedPage,
});
}
const updatedFilters = {
op: 'AND',
items: [
hostNameFilter,
...(newFilters || []),
...(paginationFilter ? [paginationFilter] : []),
].filter((item): item is TagFilterItem => item !== undefined),
};
setSearchParams({
...Object.fromEntries(searchParams.entries()),
[INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS]: JSON.stringify(
updatedFilters,
),
[INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view,
});
return updatedFilters;
});
},
// eslint-disable-next-line react-hooks/exhaustive-deps
[],
);
const handleChangeTracesFilters = useCallback(
@@ -288,6 +308,11 @@ function HostMetricsDetails({
});
if (selectedView === VIEW_TYPES.LOGS) {
const filtersWithoutPagination = {
...logFilters,
items: logFilters?.items?.filter((item) => item.key?.key !== 'id') || [],
};
const compositeQuery = {
...initialQueryState,
queryType: 'builder',
@@ -297,11 +322,7 @@ function HostMetricsDetails({
{
...initialQueryBuilderFormValuesMap.logs,
aggregateOperator: LogsAggregatorOperator.NOOP,
filter: { expression: hostMetricLogsExpr },
expression: hostMetricLogsExpr,
having: {
expression: '',
},
filters: filtersWithoutPagination,
},
],
},
@@ -543,11 +564,12 @@ function HostMetricsDetails({
/>
)}
{selectedView === VIEW_TYPES.LOGS && (
<HostMetricsLogs
<HostMetricLogsDetailedView
timeRange={modalTimeRange}
isModalTimeSelection={isModalTimeSelection}
handleTimeChange={handleTimeChange}
initialExpression={initialLogsExpression}
handleChangeLogFilters={handleChangeLogFilters}
logFilters={logFilters}
selectedInterval={selectedInterval}
/>
)}

View File

@@ -1,64 +0,0 @@
.header {
display: flex;
justify-content: flex-end;
padding: var(--spacing-4) 0px;
border-radius: 3px;
}
.logs {
border: 1px solid var(--border);
margin-top: var(--spacing-4);
}
.listContainer {
flex: 1;
height: calc(100vh - 278px) !important;
display: flex;
height: 100%;
:global(.raw-log-content) {
width: 100%;
text-wrap: inherit;
word-wrap: break-word;
}
}
.listCard {
width: 100%;
margin-top: 12px;
:global(.ant-card-body) {
padding: 0;
height: 100%;
width: 100%;
}
}
.logsLoadingSkeleton {
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
gap: 8px;
padding: 8px 0;
:global(.ant-skeleton-input-sm) {
height: 18px;
}
}
.noLogsFound {
height: 50vh;
width: 100%;
display: flex;
justify-content: center;
align-items: center;
padding: 24px;
box-sizing: border-box;
p {
display: flex;
align-items: center;
gap: 16px;
}
}

View File

@@ -0,0 +1,133 @@
.host-metrics-logs-container {
margin-top: 1rem;
.filter-section {
flex: 1;
.ant-select-selector {
border-radius: 2px;
border: 1px solid var(--bg-slate-400) !important;
background-color: var(--bg-ink-300) !important;
input {
font-size: 12px;
}
.ant-tag .ant-typography {
font-size: 12px;
}
}
}
.host-metrics-logs-header {
display: flex;
justify-content: space-between;
gap: 8px;
padding: 12px;
border-radius: 3px;
border: 1px solid var(--bg-slate-500);
}
.host-metrics-logs {
margin-top: 1rem;
.virtuoso-list {
overflow-y: hidden !important;
&::-webkit-scrollbar {
width: 0.3rem;
height: 0.3rem;
}
&::-webkit-scrollbar-track {
background: transparent;
}
&::-webkit-scrollbar-thumb {
background: var(--bg-slate-300);
}
&::-webkit-scrollbar-thumb:hover {
background: var(--bg-slate-200);
}
.ant-row {
width: fit-content;
}
}
.skeleton-container {
height: 100%;
padding: 16px;
}
}
}
.host-metrics-logs-list-container {
flex: 1;
height: calc(100vh - 272px) !important;
display: flex;
height: 100%;
.raw-log-content {
width: 100%;
text-wrap: inherit;
word-wrap: break-word;
}
}
.host-metrics-logs-list-card {
width: 100%;
margin-top: 12px;
.ant-card-body {
padding: 0;
height: 100%;
width: 100%;
}
}
.logs-loading-skeleton {
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
gap: 8px;
padding: 8px 0;
.ant-skeleton-input-sm {
height: 18px;
}
}
.no-logs-found {
height: 50vh;
width: 100%;
display: flex;
justify-content: center;
align-items: center;
padding: 24px;
box-sizing: border-box;
.ant-typography {
display: flex;
align-items: center;
gap: 16px;
}
}
.lightMode {
.filter-section {
border-top: 1px solid var(--bg-vanilla-300);
border-bottom: 1px solid var(--bg-vanilla-300);
.ant-select-selector {
border-color: var(--bg-vanilla-300) !important;
background-color: var(--bg-vanilla-100) !important;
color: var(--bg-ink-200);
}
}
}

View File

@@ -0,0 +1,100 @@
import { useMemo } from 'react';
import QueryBuilderSearch from 'container/QueryBuilder/filters/QueryBuilderSearch';
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
import {
CustomTimeType,
Time,
} from 'container/TopNav/DateTimeSelectionV2/types';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
import { VIEWS } from '../constants';
import HostMetricsLogs from './HostMetricsLogs';
import './HostMetricLogs.styles.scss';
interface Props {
timeRange: {
startTime: number;
endTime: number;
};
isModalTimeSelection: boolean;
handleTimeChange: (
interval: Time | CustomTimeType,
dateTimeRange?: [number, number],
) => void;
handleChangeLogFilters: (value: IBuilderQuery['filters'], view: VIEWS) => void;
logFilters: IBuilderQuery['filters'];
selectedInterval: Time;
}
function HostMetricLogsDetailedView({
timeRange,
isModalTimeSelection,
handleTimeChange,
handleChangeLogFilters,
logFilters,
selectedInterval,
}: Props): JSX.Element {
const { currentQuery } = useQueryBuilder();
const updatedCurrentQuery = useMemo(
() => ({
...currentQuery,
builder: {
...currentQuery.builder,
queryData: [
{
...currentQuery.builder.queryData[0],
dataSource: DataSource.LOGS,
aggregateOperator: 'noop',
aggregateAttribute: {
...currentQuery.builder.queryData[0].aggregateAttribute,
},
filters: {
items:
logFilters?.items?.filter((item) => item.key?.key !== 'host.name') ||
[],
op: 'AND',
},
},
],
},
}),
[currentQuery, logFilters?.items],
);
const query = updatedCurrentQuery?.builder?.queryData[0] || null;
return (
<div className="host-metrics-logs-container">
<div className="host-metrics-logs-header">
<div className="filter-section">
{query && (
<QueryBuilderSearch
query={query as IBuilderQuery}
onChange={(value): void => handleChangeLogFilters(value, VIEWS.LOGS)}
disableNavigationShortcuts
/>
)}
</div>
<div className="datetime-section">
<DateTimeSelectionV2
showAutoRefresh
showRefreshText={false}
hideShareModal
isModalTimeSelection={isModalTimeSelection}
onTimeChange={handleTimeChange}
defaultRelativeTime="5m"
modalSelectedInterval={selectedInterval}
modalInitialStartTime={timeRange.startTime * 1000}
modalInitialEndTime={timeRange.endTime * 1000}
/>
</div>
</div>
<HostMetricsLogs timeRange={timeRange} filters={logFilters} />
</div>
);
}
export default HostMetricLogsDetailedView;

View File

@@ -1,161 +1,88 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useCallback, useEffect, useMemo, useRef } from 'react';
import { useQuery } from 'react-query';
import { Virtuoso, VirtuosoHandle } from 'react-virtuoso';
import { Card } from 'antd';
import logEvent from 'api/common/logEvent';
import LogDetail from 'components/LogDetail';
import RawLogView from 'components/Logs/RawLogView';
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
import QuerySearch from 'components/QueryBuilderV2/QueryV2/QuerySearch/QuerySearch';
import { InfraMonitoringEvents } from 'constants/events';
import { DEFAULT_ENTITY_VERSION } from 'constants/app';
import LogsError from 'container/LogsError/LogsError';
import { LogsLoading } from 'container/LogsLoading/LogsLoading';
import { FontSize } from 'container/OptionsMenu/types';
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
import {
CustomTimeType,
Time,
} from 'container/TopNav/DateTimeSelectionV2/types';
import { getOldLogsOperatorFromNew } from 'hooks/logs/useActiveLog';
import { useHandleLogsPagination } from 'hooks/infraMonitoring/useHandleLogsPagination';
import useLogDetailHandlers from 'hooks/logs/useLogDetailHandlers';
import useScrollToLog from 'hooks/logs/useScrollToLog';
import useDebounce from 'hooks/useDebounce';
import { generateFilterQuery } from 'lib/logs/generateFilterQuery';
import { parseAsString, useQueryState } from 'nuqs';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { ILog } from 'types/api/logs/log';
import { DataSource } from 'types/common/queryBuilder';
import { validateQuery } from 'utils/queryValidationUtils';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import {
getHostLogsQueryPayload,
HOST_METRICS_LOGS_EXPR_QUERY_KEY,
} from './constants';
import { useInfiniteHostMetricLogs } from './hooks';
import { getHostLogsQueryPayload } from './constants';
import NoLogsContainer from './NoLogsContainer';
import styles from './HostMetricLogs.module.scss';
import './HostMetricLogs.styles.scss';
interface Props {
initialExpression: string;
timeRange: {
startTime: number;
endTime: number;
};
isModalTimeSelection: boolean;
handleTimeChange: (
interval: Time | CustomTimeType,
dateTimeRange?: [number, number],
) => void;
selectedInterval: Time;
filters: IBuilderQuery['filters'];
}
const EXPRESSION_DEBOUNCE_TIME_MS = 300;
function HostMetricsLogs({
initialExpression,
timeRange,
isModalTimeSelection,
handleTimeChange,
selectedInterval,
}: Props): JSX.Element {
function HostMetricsLogs({ timeRange, filters }: Props): JSX.Element {
const virtuosoRef = useRef<VirtuosoHandle>(null);
const [filterExpression, setFilterExpression] = useQueryState(
HOST_METRICS_LOGS_EXPR_QUERY_KEY,
parseAsString,
);
const [inputExpression, setInputExpression] = useState(
filterExpression || initialExpression,
);
useEffect(() => {
// If expression is present in the URL, prefer it and don't override it.
// Otherwise, initialize URL state from the host's default expression.
if (filterExpression) {
setInputExpression(filterExpression);
return;
}
setInputExpression(initialExpression);
setFilterExpression(initialExpression);
}, [filterExpression, initialExpression, setFilterExpression]);
const debouncedFilterExpression = useDebounce(
filterExpression?.trim() || initialExpression,
EXPRESSION_DEBOUNCE_TIME_MS,
);
const {
activeLog,
onAddToQuery,
selectedTab,
handleSetActiveLog,
handleCloseLogDetail,
} = useLogDetailHandlers();
const onAddToQuery = useCallback(
(fieldKey: string, fieldValue: string, operator: string): void => {
handleCloseLogDetail();
const partExpression = generateFilterQuery({
fieldKey,
fieldValue,
type: getOldLogsOperatorFromNew(operator),
});
const newExpression = inputExpression.trim()
? `${inputExpression} AND ${partExpression}`
: partExpression;
setInputExpression(newExpression);
setFilterExpression(newExpression);
},
[inputExpression, setFilterExpression, handleCloseLogDetail],
const basePayload = getHostLogsQueryPayload(
timeRange.startTime,
timeRange.endTime,
filters,
);
const handleFilterChange = useCallback(
(expression: string): void => {
setInputExpression(expression);
const validation = validateQuery(expression);
if (validation.isValid) {
setFilterExpression(expression);
logEvent(InfraMonitoringEvents.FilterApplied, {
entity: InfraMonitoringEvents.HostEntity,
view: InfraMonitoringEvents.LogsView,
page: InfraMonitoringEvents.DetailedPage,
});
}
},
[setFilterExpression],
);
const queryData = useMemo(
() =>
getHostLogsQueryPayload({
start: timeRange.startTime,
end: timeRange.endTime,
// this should use inputExpression to show suggestions correctly
// while we don't accept the final expression yet
expression: inputExpression,
}).queryData,
[timeRange.startTime, timeRange.endTime, inputExpression],
);
const {
logs,
hasReachedEndOfLogs,
isPaginating,
currentPage,
setIsPaginating,
handleNewData,
loadMoreLogs,
hasNextPage,
isFetchingNextPage,
isLoading,
isFetching,
isError,
} = useInfiniteHostMetricLogs({
expression: debouncedFilterExpression,
startTime: timeRange.startTime,
endTime: timeRange.endTime,
queryPayload,
} = useHandleLogsPagination({
timeRange,
filters,
excludeFilterKeys: ['host.name'],
basePayload,
});
const { data, isLoading, isFetching, isError } = useQuery({
queryKey: [
'hostMetricsLogs',
timeRange.startTime,
timeRange.endTime,
filters,
currentPage,
],
queryFn: () => GetMetricQueryRange(queryPayload, DEFAULT_ENTITY_VERSION),
enabled: !!queryPayload,
keepPreviousData: isPaginating,
});
useEffect(() => {
if (data?.payload?.data?.newResult?.data?.result) {
handleNewData(data.payload.data.newResult.data.result);
}
}, [data, handleNewData]);
useEffect(() => {
setIsPaginating(false);
}, [data, setIsPaginating]);
const handleScrollToLog = useScrollToLog({
logs,
virtuosoRef,
@@ -195,21 +122,22 @@ function HostMetricsLogs({
const renderFooter = useCallback(
(): JSX.Element | null => (
<>
{isFetchingNextPage ? (
<div className={styles.logsLoadingSkeleton}> Loading more logs ... </div>
) : !hasNextPage && logs.length > 0 ? (
<div className={styles.logsLoadingSkeleton}> *** End *** </div>
{isFetching ? (
<div className="logs-loading-skeleton"> Loading more logs ... </div>
) : hasReachedEndOfLogs ? (
<div className="logs-loading-skeleton"> *** End *** </div>
) : null}
</>
),
[isFetchingNextPage, hasNextPage, logs.length],
[isFetching, hasReachedEndOfLogs],
);
const renderContent = useMemo(
() => (
<Card bordered={false} className={styles.listCard}>
<Card bordered={false} className="host-metrics-logs-list-card">
<OverlayScrollbar isVirtuoso>
<Virtuoso
className="host-metrics-logs-virtuoso"
key="host-metrics-logs-virtuoso"
ref={virtuosoRef}
data={logs}
@@ -227,55 +155,32 @@ function HostMetricsLogs({
[logs, loadMoreLogs, getItemContent, renderFooter],
);
const showInitialLoading = isLoading || (isFetching && logs.length === 0);
return (
<>
<div className={styles.header}>
<DateTimeSelectionV2
showAutoRefresh
showRefreshText={false}
hideShareModal
isModalTimeSelection={isModalTimeSelection}
onTimeChange={handleTimeChange}
defaultRelativeTime="5m"
modalSelectedInterval={selectedInterval}
modalInitialStartTime={timeRange.startTime * 1000}
modalInitialEndTime={timeRange.endTime * 1000}
<div className="host-metrics-logs">
{isLoading && <LogsLoading />}
{!isLoading && !isError && logs.length === 0 && <NoLogsContainer />}
{isError && !isLoading && <LogsError />}
{!isLoading && !isError && logs.length > 0 && (
<div
className="host-metrics-logs-list-container"
data-log-detail-ignore="true"
>
{renderContent}
</div>
)}
{selectedTab && activeLog && (
<LogDetail
log={activeLog}
onClose={handleCloseLogDetail}
logs={logs}
onNavigateLog={handleSetActiveLog}
selectedTab={selectedTab}
onAddToQuery={onAddToQuery}
onClickActionItem={onAddToQuery}
onScrollToLog={handleScrollToLog}
/>
</div>
<QuerySearch
queryData={queryData}
onChange={handleFilterChange}
dataSource={DataSource.LOGS}
/>
<div className={styles.logs}>
{showInitialLoading && <LogsLoading />}
{!showInitialLoading && !isError && logs.length === 0 && (
<NoLogsContainer />
)}
{isError && !showInitialLoading && <LogsError />}
{!showInitialLoading && !isError && logs.length > 0 && (
<div className={styles.listContainer} data-log-detail-ignore="true">
{renderContent}
</div>
)}
{selectedTab && activeLog && (
<LogDetail
log={activeLog}
onClose={handleCloseLogDetail}
logs={logs}
onNavigateLog={handleSetActiveLog}
selectedTab={selectedTab}
onAddToQuery={onAddToQuery}
onClickActionItem={onAddToQuery}
onScrollToLog={handleScrollToLog}
/>
)}
</div>
</>
)}
</div>
);
}

View File

@@ -1,15 +1,16 @@
import { Color } from '@signozhq/design-tokens';
import { Typography } from 'antd';
import { Ghost } from 'lucide-react';
import styles from './HostMetricLogs.module.scss';
const { Text } = Typography;
/**
 * Empty-state placeholder rendered when the host has no logs in the
 * selected time range. Purely presentational; takes no props.
 *
 * Fix: this span interleaved the pre-change JSX (`styles.noLogsFound`,
 * `<p>`) with the post-change JSX (`"no-logs-found"`, `<Text>`), which is
 * not valid JSX. Keep only the coherent post-change version.
 */
export default function NoLogsContainer(): React.ReactElement {
	return (
		<div className="no-logs-found">
			<Text type="secondary">
				<Ghost size={24} color={Color.BG_AMBER_500} /> No logs found for this host
				in the selected time range.
			</Text>
		</div>
	);
}

View File

@@ -1,975 +0,0 @@
import { VirtuosoMockContext } from 'react-virtuoso';
import { ENVIRONMENT } from 'constants/env';
import { server } from 'mocks-server/server';
import { rest } from 'msw';
import { NuqsTestingAdapter } from 'nuqs/adapters/testing';
import { act, render, screen, userEvent, waitFor } from 'tests/test-utils';
import HostMetricsLogs from '../HostMetricsLogs';
// Replace react-virtuoso's <Virtuoso> with a plain list renderer so tests
// can see every row without a real scroll viewport. The mock additionally
// exposes:
//  - a "virtuoso-end-reached" button that fires `endReached` with the last
//    index, letting tests drive infinite-scroll pagination on demand;
//  - the optional Footer component, so loading / end-of-list markers render.
jest.mock('react-virtuoso', () => {
	const actual = jest.requireActual('react-virtuoso');
	return {
		...actual,
		Virtuoso: ({
			data,
			itemContent,
			endReached,
			components,
			className,
		}: {
			data?: any[];
			itemContent?: (index: number, item: any) => React.ReactNode;
			endReached?: (index: number) => void;
			components?: { Footer?: React.ComponentType };
			className?: string;
		}): JSX.Element => (
			<div data-testid="virtuoso-mock" className={className}>
				{Array.isArray(data) &&
					data.map((item, index) => (
						<div key={item?.id ?? index} data-testid={`virtuoso-item-${index}`}>
							{itemContent?.(index, item)}
						</div>
					))}
				<button
					type="button"
					data-testid="virtuoso-end-reached"
					onClick={(): void => endReached?.((data?.length || 0) - 1)}
				>
					endReached
				</button>
				{components?.Footer ? <components.Footer /> : null}
			</div>
		),
	};
});
const QUERY_RANGE_URL = `${ENVIRONMENT.baseURL}/api/v5/query_range`;
const FIELDS_KEYS_URL = `${ENVIRONMENT.baseURL}/api/v1/fields/keys`;
const FIELDS_VALUES_URL = `${ENVIRONMENT.baseURL}/api/v1/fields/values`;
// Fabricates a V5 /query_range response for raw logs, shaped as
// { data: { type: 'raw', data: { results: [{ queryName, rows }] } } }.
// A full page (`hasMore`) yields exactly `pageSize` rows; otherwise a short
// page of min(pageSize / 2, 10) rows so callers can simulate the last page.
const createLogsResponse = ({
	offset = 0,
	pageSize = 100,
	hasMore = true,
}: {
	offset?: number;
	pageSize?: number;
	hasMore?: boolean;
}): any => {
	const LEVELS = ['INFO', 'WARN', 'ERROR'];
	const SERVICES = ['frontend', 'backend', 'database'];
	const SEVERITIES = [9, 13, 17];
	// Fixed anchor time keeps the fixture deterministic across runs.
	const anchorMs = new Date('2024-02-15T21:20:22Z').getTime();

	// Build one log row; fields cycle through the level/service/severity
	// triples based on the cumulative index.
	const buildRow = (logIndex: number): any => {
		const isoTime = new Date(anchorMs - logIndex * 1000).toISOString();
		const level = LEVELS[logIndex % 3];
		const serviceName = SERVICES[logIndex % 3];
		return {
			timestamp: isoTime,
			data: {
				attributes_bool: {},
				attributes_float64: {},
				attributes_int64: {},
				attributes_string: {
					host_name: 'test-host',
					log_level: level,
					service: serviceName,
				},
				body: `${isoTime} ${level} ${serviceName} Log message ${logIndex}`,
				id: `log-id-${logIndex}`,
				resources_string: {
					'host.name': 'test-host',
				},
				severity_number: SEVERITIES[logIndex % 3],
				severity_text: level,
				span_id: `span-${logIndex}`,
				trace_flags: 0,
				trace_id: `trace-${logIndex}`,
			},
		};
	};

	const rowCount = hasMore ? pageSize : Math.min(pageSize / 2, 10);
	const rows = Array.from({ length: rowCount }, (_, i) => buildRow(offset + i));

	return {
		data: {
			type: 'raw',
			data: {
				results: [{ queryName: 'A', rows }],
			},
		},
	};
};
// Successful V5 raw-logs response whose single query result has zero rows.
const createEmptyLogsResponse = (): any => {
	const emptyResult = { queryName: 'A', rows: [] };
	return {
		data: {
			type: 'raw',
			data: { results: [emptyResult] },
		},
	};
};
// Baseline props for rendering <HostMetricsLogs /> in tests.
// Time range values are epoch seconds (tests below assert the component
// multiplies them by 1000 for the V5 API); individual tests spread and
// override fields as needed.
const defaultProps = {
	initialExpression: 'host_name = "test-host"',
	timeRange: {
		startTime: 1708000000,
		endTime: 1708003600,
	},
	isModalTimeSelection: false,
	handleTimeChange: jest.fn(),
	selectedInterval: '15m' as const,
};
// Mock OverlayScrollbar to avoid scroll behavior issues in tests:
// render children directly with no custom scrollbar wrapper.
jest.mock('components/OverlayScrollbar/OverlayScrollbar', () => ({
	__esModule: true,
	default: ({ children }: { children: React.ReactNode }): JSX.Element => (
		<div>{children}</div>
	),
}));
// Stub the DateTimeSelectionV2 picker with a single button that invokes
// `onTimeChange('5m')`, so tests can trigger a time-range change without
// driving the real date-picker UI. The `.datetime-section` class and
// `time-picker-btn` test id are what the tests below query for.
jest.mock('container/TopNav/DateTimeSelectionV2/index.tsx', () => ({
	__esModule: true,
	default: ({
		onTimeChange,
	}: {
		onTimeChange?: (interval: string, dateTimeRange?: [number, number]) => void;
	}): JSX.Element => {
		return (
			<div className="datetime-section" data-testid="datetime-selection">
				<button
					data-testid="time-picker-btn"
					onClick={(): void => {
						onTimeChange?.('5m');
					}}
				>
					Select Time
				</button>
			</div>
		);
	},
}));
// Successful /fields/keys response with an empty key set, so the query
// editor's autocomplete has nothing to suggest.
const createFieldKeysResponse = (): any => {
	return {
		status: 'success',
		data: { complete: true, keys: {} },
	};
};
// Successful /fields/values response with empty value lists of every type.
const createFieldValuesResponse = (): any => {
	const emptyValues = {
		stringValues: [],
		numberValues: [],
		boolValues: [],
	};
	return { status: 'success', data: { values: emptyValues } };
};
// Render <HostMetricsLogs /> inside the adapters the tests need:
//  - NuqsTestingAdapter: in-memory URL search params (`searchParams` seeds
//    initial values, e.g. a filter expression persisted in the URL);
//  - VirtuosoMockContext: fixed viewport/item sizes so react-virtuoso
//    renders rows deterministically in jsdom.
const renderComponent = (
	props = defaultProps,
	searchParams?: Record<string, string>,
): ReturnType<typeof render> =>
	render(
		<NuqsTestingAdapter searchParams={searchParams} hasMemory>
			<VirtuosoMockContext.Provider
				value={{ viewportHeight: 600, itemHeight: 50 }}
			>
				<HostMetricsLogs {...props} />
			</VirtuosoMockContext.Provider>
		</NuqsTestingAdapter>,
	);
describe('HostMetricsLogs', () => {
beforeEach(() => {
window.history.pushState({}, 'Test', '/');
server.use(
rest.get(FIELDS_KEYS_URL, (_, res, ctx) =>
res(ctx.status(200), ctx.json(createFieldKeysResponse())),
),
rest.get(FIELDS_VALUES_URL, (_, res, ctx) =>
res(ctx.status(200), ctx.json(createFieldValuesResponse())),
),
);
});
	describe('loading state', () => {
		it('should show loading state while fetching logs', async () => {
			// Keep the query_range request pending until we resolve it manually,
			// so the loading placeholder is guaranteed to be visible when we assert.
			let resolveRequest: (value: any) => void;
			const pendingPromise = new Promise((resolve) => {
				resolveRequest = resolve;
			});
			server.use(
				rest.post(QUERY_RANGE_URL, async (_, res, ctx) => {
					await pendingPromise;
					return res(ctx.status(200), ctx.json(createLogsResponse({})));
				}),
			);
			renderComponent();
			expect(screen.getByText('pending_data_placeholder')).toBeInTheDocument();
			// Unblock the handler so the in-flight request settles before teardown.
			act(() => {
				resolveRequest!(true);
			});
		});
	});
describe('empty state', () => {
it('should show no logs message when no logs are returned', async () => {
server.use(
rest.post(QUERY_RANGE_URL, (_, res, ctx) =>
res(ctx.status(200), ctx.json(createEmptyLogsResponse())),
),
);
renderComponent();
await waitFor(() => {
expect(
screen.getByText(/No logs found for this host/i),
).toBeInTheDocument();
});
});
});
describe('error state', () => {
it('should show error state when API returns error', async () => {
server.use(
rest.post(QUERY_RANGE_URL, (_, res, ctx) =>
res(ctx.status(500), ctx.json({ error: 'Internal Server Error' })),
),
);
renderComponent();
await waitFor(() => {
expect(screen.getByText(/Something went wrong/i)).toBeInTheDocument();
});
});
});
describe('success state', () => {
it('should render logs when API returns data', async () => {
server.use(
rest.post(QUERY_RANGE_URL, (_, res, ctx) =>
res(ctx.status(200), ctx.json(createLogsResponse({ pageSize: 5 }))),
),
);
renderComponent();
await waitFor(() => {
expect(screen.getByText(/Log message 0/)).toBeInTheDocument();
});
});
it('should render initial expression in QuerySearch editor', async () => {
server.use(
rest.post(QUERY_RANGE_URL, (_, res, ctx) =>
res(ctx.status(200), ctx.json(createLogsResponse({ pageSize: 5 }))),
),
);
renderComponent();
await waitFor(() => {
const editorText =
document.querySelector('.query-where-clause-editor')?.textContent || '';
expect(editorText).toContain('host_name');
expect(editorText).toContain('test-host');
});
});
it('should render the filter section', async () => {
server.use(
rest.post(QUERY_RANGE_URL, (_, res, ctx) =>
res(ctx.status(200), ctx.json(createLogsResponse({ pageSize: 5 }))),
),
);
renderComponent();
await waitFor(() => {
expect(
document.querySelector('.code-mirror-where-clause'),
).toBeInTheDocument();
});
});
it('should render date time selection component', async () => {
server.use(
rest.post(QUERY_RANGE_URL, (_, res, ctx) =>
res(ctx.status(200), ctx.json(createLogsResponse({ pageSize: 5 }))),
),
);
renderComponent();
await waitFor(() => {
// DateTimeSelectionV2 renders a time picker button
expect(document.querySelector('.datetime-section')).toBeInTheDocument();
});
});
});
describe('pagination', () => {
it('should send correct offset for pagination', async () => {
const requestPayloads: any[] = [];
server.use(
rest.post(QUERY_RANGE_URL, async (req, res, ctx) => {
const payload = await req.json();
requestPayloads.push(payload);
const querySpec = payload.compositeQuery?.queries?.[0]?.spec;
const offset = querySpec?.offset ?? 0;
return res(
ctx.status(200),
ctx.json(
createLogsResponse({
offset,
pageSize: 100,
hasMore: offset === 0,
}),
),
);
}),
);
renderComponent();
await waitFor(() => {
expect(requestPayloads.length).toBeGreaterThanOrEqual(1);
});
const firstPayload = requestPayloads[0];
const querySpec = firstPayload.compositeQuery?.queries?.[0]?.spec;
expect(querySpec?.offset).toBe(0);
});
it('should fetch next page when virtuoso endReached is triggered', async () => {
const requestPayloads: any[] = [];
server.use(
rest.post(QUERY_RANGE_URL, async (req, res, ctx) => {
const payload = await req.json();
requestPayloads.push(payload);
const querySpec = payload.compositeQuery?.queries?.[0]?.spec;
const offset = querySpec?.offset ?? 0;
return res(
ctx.status(200),
ctx.json(
createLogsResponse({
offset,
pageSize: 100,
hasMore: offset === 0,
}),
),
);
}),
);
renderComponent();
await waitFor(() => {
expect(screen.getByText(/Log message 0/)).toBeInTheDocument();
});
expect(requestPayloads[0]?.compositeQuery?.queries?.[0]?.spec?.offset).toBe(
0,
);
await userEvent.click(screen.getByTestId('virtuoso-end-reached'));
await waitFor(() => {
expect(requestPayloads.length).toBeGreaterThanOrEqual(2);
});
expect(requestPayloads[1]?.compositeQuery?.queries?.[0]?.spec?.offset).toBe(
100,
);
});
});
describe('filter expression', () => {
it('should include initial expression in the query', async () => {
const requestPayloads: any[] = [];
server.use(
rest.post(QUERY_RANGE_URL, async (req, res, ctx) => {
const payload = await req.json();
requestPayloads.push(payload);
return res(ctx.status(200), ctx.json(createLogsResponse({ pageSize: 5 })));
}),
);
renderComponent();
await waitFor(() => {
expect(requestPayloads.length).toBeGreaterThanOrEqual(1);
});
const firstPayload = requestPayloads[0];
const querySpec = firstPayload.compositeQuery?.queries?.[0]?.spec;
expect(querySpec?.filter?.expression).toContain('host_name = "test-host"');
});
it('should load expression from URL and persist it in the query', async () => {
const requestPayloads: any[] = [];
server.use(
rest.post(QUERY_RANGE_URL, async (req, res, ctx) => {
const payload = await req.json();
requestPayloads.push(payload);
const querySpec = payload.compositeQuery?.queries?.[0]?.spec;
const offset = querySpec?.offset ?? 0;
return res(
ctx.status(200),
ctx.json(
createLogsResponse({
offset,
pageSize: 100,
hasMore: offset === 0,
}),
),
);
}),
);
const urlExpression = 'service = "from-url"';
renderComponent(defaultProps, { hostMetricsLogsExpr: urlExpression });
await waitFor(() => {
expect(requestPayloads.length).toBeGreaterThanOrEqual(1);
});
expect(
requestPayloads[0]?.compositeQuery?.queries?.[0]?.spec?.filter?.expression,
).toContain(urlExpression);
await userEvent.click(screen.getByTestId('virtuoso-end-reached'));
await waitFor(() => {
expect(requestPayloads.length).toBeGreaterThanOrEqual(2);
});
expect(
requestPayloads[1]?.compositeQuery?.queries?.[0]?.spec?.filter?.expression,
).toContain(urlExpression);
});
it('should use custom expression when provided', async () => {
const requestPayloads: any[] = [];
server.use(
rest.post(QUERY_RANGE_URL, async (req, res, ctx) => {
const payload = await req.json();
requestPayloads.push(payload);
return res(ctx.status(200), ctx.json(createLogsResponse({ pageSize: 5 })));
}),
);
const customExpression = 'service = "custom-service"';
renderComponent({
...defaultProps,
initialExpression: customExpression,
});
// Wait for debounce and potential re-renders to settle
await waitFor(
() => {
const hasCustomExpression = requestPayloads.some((payload) => {
const querySpec = payload.compositeQuery?.queries?.[0]?.spec;
return querySpec?.filter?.expression?.includes('custom-service');
});
expect(hasCustomExpression).toBe(true);
},
{ timeout: 2000 },
);
});
});
describe('time range', () => {
it('should include correct time range in the query', async () => {
const requestPayloads: any[] = [];
server.use(
rest.post(QUERY_RANGE_URL, async (req, res, ctx) => {
const payload = await req.json();
requestPayloads.push(payload);
return res(ctx.status(200), ctx.json(createLogsResponse({ pageSize: 5 })));
}),
);
const customTimeRange = {
startTime: 1700000000,
endTime: 1700003600,
};
renderComponent({
...defaultProps,
timeRange: customTimeRange,
});
await waitFor(() => {
expect(requestPayloads.length).toBeGreaterThanOrEqual(1);
});
const firstPayload = requestPayloads[0];
// V5 API expects milliseconds (seconds * 1000)
expect(firstPayload.start).toBe(customTimeRange.startTime * 1000);
expect(firstPayload.end).toBe(customTimeRange.endTime * 1000);
});
});
describe('query structure', () => {
it('should send correct query structure to the API', async () => {
const requestPayloads: any[] = [];
server.use(
rest.post(QUERY_RANGE_URL, async (req, res, ctx) => {
const payload = await req.json();
requestPayloads.push(payload);
return res(ctx.status(200), ctx.json(createLogsResponse({ pageSize: 5 })));
}),
);
renderComponent();
await waitFor(() => {
expect(requestPayloads.length).toBeGreaterThanOrEqual(1);
});
const firstPayload = requestPayloads[0];
const querySpec = firstPayload.compositeQuery?.queries?.[0]?.spec;
expect(querySpec?.signal).toBe('logs');
expect(querySpec?.order).toEqual(
expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ name: 'timestamp' }),
direction: 'desc',
}),
]),
);
});
it('should send request type as raw for logs list', async () => {
const requestPayloads: any[] = [];
server.use(
rest.post(QUERY_RANGE_URL, async (req, res, ctx) => {
const payload = await req.json();
requestPayloads.push(payload);
return res(ctx.status(200), ctx.json(createLogsResponse({ pageSize: 5 })));
}),
);
renderComponent();
await waitFor(() => {
expect(requestPayloads.length).toBeGreaterThanOrEqual(1);
});
const firstPayload = requestPayloads[0];
expect(firstPayload.requestType).toBe('raw');
});
it('should include pageSize in the query', async () => {
const requestPayloads: any[] = [];
server.use(
rest.post(QUERY_RANGE_URL, async (req, res, ctx) => {
const payload = await req.json();
requestPayloads.push(payload);
return res(ctx.status(200), ctx.json(createLogsResponse({ pageSize: 5 })));
}),
);
renderComponent();
await waitFor(() => {
expect(requestPayloads.length).toBeGreaterThanOrEqual(1);
});
const firstPayload = requestPayloads[0];
const querySpec = firstPayload.compositeQuery?.queries?.[0]?.spec;
// Should have a limit set for pagination
expect(querySpec?.limit).toBeDefined();
expect(typeof querySpec?.limit).toBe('number');
});
});
describe('component props', () => {
it('should render datetime section with isModalTimeSelection', async () => {
server.use(
rest.post(QUERY_RANGE_URL, (_, res, ctx) =>
res(ctx.status(200), ctx.json(createLogsResponse({ pageSize: 5 }))),
),
);
renderComponent({
...defaultProps,
isModalTimeSelection: true,
});
await waitFor(() => {
expect(document.querySelector('.datetime-section')).toBeInTheDocument();
});
});
it('should render component with handleTimeChange', async () => {
const mockHandleTimeChange = jest.fn();
server.use(
rest.post(QUERY_RANGE_URL, (_, res, ctx) =>
res(ctx.status(200), ctx.json(createLogsResponse({ pageSize: 5 }))),
),
);
renderComponent({
...defaultProps,
handleTimeChange: mockHandleTimeChange,
});
await waitFor(() => {
expect(document.querySelector('.datetime-section')).toBeInTheDocument();
});
});
});
describe('log detail interactions', () => {
it('should open log detail drawer when clicking on a log', async () => {
server.use(
rest.post(QUERY_RANGE_URL, (_, res, ctx) =>
res(ctx.status(200), ctx.json(createLogsResponse({ pageSize: 5 }))),
),
);
renderComponent();
// Wait for logs to render
await waitFor(() => {
expect(screen.getByText(/Log message 0/)).toBeInTheDocument();
});
// Click on the first log
const logElement = screen.getByText(/Log message 0/);
await userEvent.click(logElement);
// Log detail drawer should open - it contains "Log details" title
await waitFor(() => {
expect(screen.getByText('Log details')).toBeInTheDocument();
});
});
it('should close log detail drawer when clicking on the same log again', async () => {
server.use(
rest.post(QUERY_RANGE_URL, (_, res, ctx) =>
res(ctx.status(200), ctx.json(createLogsResponse({ pageSize: 5 }))),
),
);
renderComponent();
// Wait for logs to render
await waitFor(() => {
expect(screen.getByText(/Log message 0/)).toBeInTheDocument();
});
// Click on the first log to open
const logElement = screen.getByText(/Log message 0/);
await userEvent.click(logElement);
// Wait for drawer to open
await waitFor(() => {
expect(screen.getByText('Log details')).toBeInTheDocument();
});
// Click on the same log to close (through the close button)
const closeButton = document.querySelector('.ant-drawer-close');
if (closeButton) {
await userEvent.click(closeButton);
}
// Drawer should close
await waitFor(() => {
expect(screen.queryByText('Log details')).not.toBeInTheDocument();
});
});
it('should display log body in detail drawer', async () => {
server.use(
rest.post(QUERY_RANGE_URL, (_, res, ctx) =>
res(ctx.status(200), ctx.json(createLogsResponse({ pageSize: 5 }))),
),
);
renderComponent();
// Wait for logs to render
await waitFor(() => {
expect(screen.getByText(/Log message 0/)).toBeInTheDocument();
});
// Click on the first log to open drawer
const logElement = screen.getByText(/Log message 0/);
await userEvent.click(logElement);
// Wait for drawer to open
await waitFor(() => {
expect(screen.getByText('Log details')).toBeInTheDocument();
});
// Verify the drawer tabs are displayed
// The drawer should show the Overview tab
await waitFor(() => {
expect(screen.getByText('Overview')).toBeInTheDocument();
});
// Verify other tabs are present
expect(screen.getByText('JSON')).toBeInTheDocument();
expect(screen.getByText('Context')).toBeInTheDocument();
});
});
describe('log detail filter actions', () => {
it('should apply filter-in from log detail and close the drawer', async () => {
const requestPayloads: any[] = [];
server.use(
rest.post(QUERY_RANGE_URL, async (req, res, ctx) => {
const payload = await req.json();
requestPayloads.push(payload);
return res(ctx.status(200), ctx.json(createLogsResponse({ pageSize: 5 })));
}),
);
renderComponent();
await waitFor(() => {
expect(screen.getByText(/Log message 0/)).toBeInTheDocument();
});
await userEvent.click(screen.getByText(/Log message 0/));
await waitFor(() => {
expect(screen.getByText('Log details')).toBeInTheDocument();
});
const serviceRow = await waitFor(() => {
const attributeNameCells = Array.from(
document.querySelectorAll('.attribute-name'),
);
const serviceCell = attributeNameCells.find((cell) =>
(cell.textContent || '').toLowerCase().includes('service'),
);
const row = serviceCell?.closest('tr');
if (!row) {
throw new Error('Service attribute row not found');
}
return row;
});
const filterButtons = serviceRow.querySelectorAll('button.filter-btn');
expect(filterButtons?.length).toBeGreaterThanOrEqual(2);
await userEvent.click(filterButtons[0] as HTMLButtonElement);
await waitFor(() => {
expect(screen.queryByText('Log details')).not.toBeInTheDocument();
});
await waitFor(
() => {
const matched = requestPayloads.some((payload) => {
const expression =
payload.compositeQuery?.queries?.[0]?.spec?.filter?.expression || '';
return (
(expression.includes('attributes_string.service') ||
expression.includes('service')) &&
expression.includes("('frontend')") &&
expression.includes('IN')
);
});
expect(matched).toBe(true);
},
{ timeout: 2500 },
);
});
it('should apply filter-out from log detail and close the drawer', async () => {
const requestPayloads: any[] = [];
server.use(
rest.post(QUERY_RANGE_URL, async (req, res, ctx) => {
const payload = await req.json();
requestPayloads.push(payload);
return res(ctx.status(200), ctx.json(createLogsResponse({ pageSize: 5 })));
}),
);
renderComponent();
await waitFor(() => {
expect(screen.getByText(/Log message 0/)).toBeInTheDocument();
});
await userEvent.click(screen.getByText(/Log message 0/));
await waitFor(() => {
expect(screen.getByText('Log details')).toBeInTheDocument();
});
const serviceRow = await waitFor(() => {
const attributeNameCells = Array.from(
document.querySelectorAll('.attribute-name'),
);
const serviceCell = attributeNameCells.find((cell) =>
(cell.textContent || '').toLowerCase().includes('service'),
);
const row = serviceCell?.closest('tr');
if (!row) {
throw new Error('Service attribute row not found');
}
return row;
});
const filterButtons = serviceRow.querySelectorAll('button.filter-btn');
expect(filterButtons?.length).toBeGreaterThanOrEqual(2);
// the second button that represents filter out
await userEvent.click(filterButtons[1] as HTMLButtonElement);
await waitFor(() => {
expect(screen.queryByText('Log details')).not.toBeInTheDocument();
});
await waitFor(
() => {
const matched = requestPayloads.some((payload) => {
const expression =
payload.compositeQuery?.queries?.[0]?.spec?.filter?.expression || '';
return (
(expression.includes('attributes_string.service') ||
expression.includes('service')) &&
expression.includes("('frontend')") &&
(expression.includes('NIN') || expression.includes('NOT_IN'))
);
});
expect(matched).toBe(true);
},
{ timeout: 2500 },
);
});
});
describe('time range change', () => {
it('should use different time ranges for different renders', async () => {
const requestPayloads: any[] = [];
server.use(
rest.post(QUERY_RANGE_URL, async (req, res, ctx) => {
const payload = await req.json();
requestPayloads.push(payload);
return res(ctx.status(200), ctx.json(createLogsResponse({ pageSize: 5 })));
}),
);
// First render with initial time range
const { unmount } = renderComponent();
// Wait for initial fetch
await waitFor(() => {
expect(requestPayloads.length).toBeGreaterThanOrEqual(1);
});
const firstStartTime = requestPayloads[0].start;
expect(firstStartTime).toBe(defaultProps.timeRange.startTime * 1000);
// Unmount and render again with different time range
unmount();
const newTimeRange = {
startTime: 1709000000,
endTime: 1709003600,
};
renderComponent({
...defaultProps,
timeRange: newTimeRange,
});
// Wait for fetch with new time range
await waitFor(() => {
const hasNewTimeRange = requestPayloads.some(
(p) => p.start === newTimeRange.startTime * 1000,
);
expect(hasNewTimeRange).toBe(true);
});
});
it('should call handleTimeChange callback when time picker is clicked', async () => {
const mockHandleTimeChange = jest.fn();
server.use(
rest.post(QUERY_RANGE_URL, (_, res, ctx) =>
res(ctx.status(200), ctx.json(createLogsResponse({ pageSize: 5 }))),
),
);
renderComponent({
...defaultProps,
handleTimeChange: mockHandleTimeChange,
});
// Wait for component to render
await waitFor(() => {
expect(screen.getByTestId('time-picker-btn')).toBeInTheDocument();
});
// Click the time picker button (from mock)
await userEvent.click(screen.getByTestId('time-picker-btn'));
// Verify the callback was called
expect(mockHandleTimeChange).toHaveBeenCalledWith('5m');
});
});
});

View File

@@ -1,311 +0,0 @@
import { QueryClient, QueryClientProvider } from 'react-query';
import { act, renderHook, waitFor } from '@testing-library/react';
import { ENVIRONMENT } from 'constants/env';
import { server } from 'mocks-server/server';
import { rest } from 'msw';
import { useInfiniteHostMetricLogs } from '../hooks';
const QUERY_RANGE_URL = `${ENVIRONMENT.baseURL}/api/v5/query_range`;
// Fabricate a V5 raw-logs /query_range payload for the hook under test.
// A full page (`hasMore: true`) returns exactly `pageSize` rows; the final
// page returns half that, which the hook reads as "no more pages".
// NOTE(review): timestamps derive from Date.now(), so fixture rows are not
// reproducible across runs — acceptable here since only ids/bodies are
// asserted, but confirm before asserting on timestamps.
const createLogsResponse = ({
	offset = 0,
	pageSize = 100,
	hasMore = true,
}: {
	offset?: number;
	pageSize?: number;
	hasMore?: boolean;
}): any => {
	const rowTotal = hasMore ? pageSize : pageSize / 2;
	const makeRow = (_: unknown, index: number): any => {
		const logIndex = offset + index;
		return {
			timestamp: new Date(Date.now() - logIndex * 1000).toISOString(),
			data: {
				body: `Log message ${logIndex}`,
				id: `log-${logIndex}`,
				severity_text: 'INFO',
			},
		};
	};
	return {
		data: {
			type: 'raw',
			data: {
				results: [
					{
						queryName: 'A',
						rows: Array.from({ length: rowTotal }, makeRow),
					},
				],
			},
		},
	};
};
// Successful /query_range response whose single query result has no rows.
const createEmptyResponse = (): any => {
	const noRows = { queryName: 'A', rows: [] };
	return {
		data: {
			type: 'raw',
			data: { results: [noRows] },
		},
	};
};
// Build a fresh QueryClientProvider wrapper per test so react-query cache
// state never leaks between renders; retries are disabled so error-path
// tests fail fast instead of waiting out the default retry/backoff cycle.
const createWrapper = (): React.FC<{ children: React.ReactNode }> => {
	const queryClient = new QueryClient({
		defaultOptions: {
			queries: {
				retry: false,
			},
		},
	});
	return function Wrapper({
		children,
	}: {
		children: React.ReactNode;
	}): JSX.Element {
		return (
			<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
		);
	};
};
describe('useInfiniteHostMetricLogs', () => {
const defaultParams = {
expression: 'host_name = "test-host"',
startTime: 1708000000,
endTime: 1708003600,
};
	describe('initial state', () => {
		it('should return initial loading state', () => {
			// Delay the response so the hook is still in-flight when we assert.
			server.use(
				rest.post(QUERY_RANGE_URL, (_, res, ctx) =>
					res(ctx.delay(100), ctx.status(200), ctx.json(createLogsResponse({}))),
				),
			);
			const { result } = renderHook(
				() => useInfiniteHostMetricLogs(defaultParams),
				{
					wrapper: createWrapper(),
				},
			);
			// Before the first page resolves: loading flag set, no logs yet.
			expect(result.current.isLoading).toBe(true);
			expect(result.current.logs).toEqual([]);
		});
	});
describe('successful data fetching', () => {
it('should return logs after successful fetch', async () => {
server.use(
rest.post(QUERY_RANGE_URL, (_, res, ctx) =>
res(ctx.status(200), ctx.json(createLogsResponse({ pageSize: 5 }))),
),
);
const { result } = renderHook(
() => useInfiniteHostMetricLogs(defaultParams),
{
wrapper: createWrapper(),
},
);
await waitFor(() => {
expect(result.current.isLoading).toBe(false);
});
expect(result.current.logs.length).toBe(5);
expect(result.current.isError).toBe(false);
});
it('should set hasNextPage based on response size', async () => {
server.use(
rest.post(QUERY_RANGE_URL, (_, res, ctx) =>
res(
ctx.status(200),
ctx.json(createLogsResponse({ pageSize: 100, hasMore: true })),
),
),
);
const { result } = renderHook(
() => useInfiniteHostMetricLogs(defaultParams),
{
wrapper: createWrapper(),
},
);
await waitFor(() => {
expect(result.current.isLoading).toBe(false);
});
expect(result.current.hasNextPage).toBe(true);
});
it('should not have next page when response is smaller than page size', async () => {
server.use(
rest.post(QUERY_RANGE_URL, (_, res, ctx) =>
res(
ctx.status(200),
ctx.json(createLogsResponse({ pageSize: 100, hasMore: false })),
),
),
);
const { result } = renderHook(
() => useInfiniteHostMetricLogs(defaultParams),
{
wrapper: createWrapper(),
},
);
await waitFor(() => {
expect(result.current.isLoading).toBe(false);
});
expect(result.current.hasNextPage).toBe(false);
});
});
describe('empty state', () => {
it('should return empty logs array when no data', async () => {
server.use(
rest.post(QUERY_RANGE_URL, (_, res, ctx) =>
res(ctx.status(200), ctx.json(createEmptyResponse())),
),
);
const { result } = renderHook(
() => useInfiniteHostMetricLogs(defaultParams),
{
wrapper: createWrapper(),
},
);
await waitFor(() => {
expect(result.current.isLoading).toBe(false);
});
expect(result.current.logs).toEqual([]);
expect(result.current.hasNextPage).toBe(false);
});
});
describe('error handling', () => {
it('should set isError on API failure', async () => {
server.use(
rest.post(QUERY_RANGE_URL, (_, res, ctx) =>
res(ctx.status(500), ctx.json({ error: 'Internal Server Error' })),
),
);
const { result } = renderHook(
() => useInfiniteHostMetricLogs(defaultParams),
{
wrapper: createWrapper(),
},
);
await waitFor(() => {
expect(result.current.isError).toBe(true);
});
expect(result.current.logs).toEqual([]);
});
});
describe('query disabled state', () => {
it('should not fetch when expression is empty', async () => {
const requestCount = { count: 0 };
server.use(
rest.post(QUERY_RANGE_URL, (_, res, ctx) => {
requestCount.count += 1;
return res(ctx.status(200), ctx.json(createLogsResponse({})));
}),
);
const { result } = renderHook(
() =>
useInfiniteHostMetricLogs({
...defaultParams,
expression: '',
}),
{
wrapper: createWrapper(),
},
);
// Wait a bit to ensure no request is made
await new Promise((resolve) => {
setTimeout(resolve, 300);
});
expect(requestCount.count).toBe(0);
expect(result.current.isLoading).toBe(false);
});
});
// Exercises the pagination path: the first response returns a full page with
// more data available; the second (offset 100) ends pagination.
describe('load more functionality', () => {
	it('should fetch next page when loadMoreLogs is called', async () => {
		// Counts handler invocations and switches the fixture per request.
		const requestCount = { count: 0 };
		server.use(
			rest.post(QUERY_RANGE_URL, (_, res, ctx) => {
				requestCount.count += 1;
				if (requestCount.count === 1) {
					// First page: full page + hasMore so the hook exposes a next page.
					return res(
						ctx.status(200),
						ctx.json(
							createLogsResponse({ offset: 0, pageSize: 100, hasMore: true }),
						),
					);
				}
				// Second page: hasMore=false fixture ends pagination (adds 50 rows —
				// see the length assertion below).
				return res(
					ctx.status(200),
					ctx.json(
						createLogsResponse({ offset: 100, pageSize: 100, hasMore: false }),
					),
				);
			}),
		);
		const { result } = renderHook(
			() => useInfiniteHostMetricLogs(defaultParams),
			{
				wrapper: createWrapper(),
			},
		);
		await waitFor(() => {
			expect(result.current.isLoading).toBe(false);
		});
		// Only the first page has been fetched so far.
		expect(result.current.logs.length).toBe(100);
		expect(result.current.hasNextPage).toBe(true);
		expect(requestCount.count).toBe(1);
		act(() => {
			result.current.loadMoreLogs();
		});
		// Second page appends on top of the initial 100 entries.
		await waitFor(() => {
			expect(result.current.logs.length).toBe(150);
		});
		expect(result.current.hasNextPage).toBe(false);
		expect(requestCount.count).toBe(2);
	});
});
});

View File

@@ -1,5 +1,4 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { DEFAULT_PER_PAGE_VALUE } from 'container/Controls/config';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
@@ -7,82 +6,56 @@ import { EQueryType } from 'types/common/dashboard';
import { DataSource, ReduceOperators } from 'types/common/queryBuilder';
import { v4 as uuidv4 } from 'uuid';
export interface HostLogsQueryParams {
start: number;
end: number;
expression: string;
offset?: number;
pageSize?: number;
}
export const getHostLogsQueryPayload = ({
export const getHostLogsQueryPayload = (
start: number,
end: number,
filters: IBuilderQuery['filters'],
): GetQueryResultsProps => ({
graphType: PANEL_TYPES.LIST,
selectedTime: 'GLOBAL_TIME',
query: {
clickhouse_sql: [],
promql: [],
builder: {
queryData: [
{
dataSource: DataSource.LOGS,
queryName: 'A',
aggregateOperator: 'noop',
aggregateAttribute: {
id: '------false',
dataType: DataTypes.String,
key: '',
type: '',
},
timeAggregation: 'rate',
spaceAggregation: 'sum',
functions: [],
filters,
expression: 'A',
disabled: false,
stepInterval: 60,
having: [],
limit: null,
orderBy: [
{
columnName: 'timestamp',
order: 'desc',
},
],
groupBy: [],
legend: '',
reduceTo: ReduceOperators.AVG,
offset: 0,
pageSize: 100,
},
],
queryFormulas: [],
queryTraceOperator: [],
},
id: uuidv4(),
queryType: EQueryType.QUERY_BUILDER,
},
start,
end,
expression,
offset = 0,
pageSize = DEFAULT_PER_PAGE_VALUE,
}: HostLogsQueryParams): {
query: GetQueryResultsProps;
queryData: IBuilderQuery;
} => {
const queryData: IBuilderQuery = {
dataSource: DataSource.LOGS,
queryName: 'A',
aggregateOperator: 'noop',
aggregateAttribute: {
id: '------false',
dataType: DataTypes.String,
key: '',
type: '',
},
timeAggregation: 'rate',
spaceAggregation: 'sum',
functions: [],
filter: { expression },
expression,
having: {
expression: '',
},
disabled: false,
stepInterval: 60,
limit: null,
orderBy: [
{
columnName: 'timestamp',
order: 'desc',
},
{
columnName: 'id',
order: 'desc',
},
],
groupBy: [],
legend: '',
reduceTo: ReduceOperators.AVG,
offset,
pageSize,
};
return {
query: {
graphType: PANEL_TYPES.LIST,
selectedTime: 'GLOBAL_TIME',
query: {
clickhouse_sql: [],
promql: [],
builder: {
queryData: [queryData],
queryFormulas: [],
queryTraceOperator: [],
},
id: uuidv4(),
queryType: EQueryType.QUERY_BUILDER,
},
start,
end,
},
queryData,
};
};
export const HOST_METRICS_LOGS_EXPR_QUERY_KEY = 'hostMetricsLogsExpr';
});

View File

@@ -1,93 +0,0 @@
import { useCallback, useMemo } from 'react';
import { useInfiniteQuery } from 'react-query';
import { ENTITY_VERSION_V5 } from 'constants/app';
import { DEFAULT_PER_PAGE_VALUE } from 'container/Controls/config';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { ILog } from 'types/api/logs/log';
import { getHostLogsQueryPayload } from './constants';
export function useInfiniteHostMetricLogs({
expression,
startTime,
endTime,
}: {
expression: string;
startTime: number;
endTime: number;
}): {
logs: ILog[];
isLoading: boolean;
isFetching: boolean;
isFetchingNextPage: boolean;
isError: boolean;
hasNextPage: boolean;
loadMoreLogs: () => void;
} {
const {
data,
isLoading,
isFetching,
isFetchingNextPage,
isError,
hasNextPage,
fetchNextPage,
} = useInfiniteQuery({
queryKey: ['hostMetricsLogs', startTime, endTime, expression],
queryFn: async ({ pageParam = 0 }) => {
const { query } = getHostLogsQueryPayload({
start: startTime,
end: endTime,
expression,
offset: pageParam,
pageSize: DEFAULT_PER_PAGE_VALUE,
});
return GetMetricQueryRange(query, ENTITY_VERSION_V5);
},
getNextPageParam: (lastPage, allPages) => {
const list = lastPage?.payload?.data?.newResult?.data?.result?.[0]?.list;
if (!list || list.length < DEFAULT_PER_PAGE_VALUE) {
return undefined;
}
return allPages.length * DEFAULT_PER_PAGE_VALUE;
},
enabled: !!expression,
});
const logs = useMemo<ILog[]>(() => {
if (!data?.pages) {
return [];
}
return data.pages.flatMap((page) => {
const list = page?.payload?.data?.newResult?.data?.result?.[0]?.list;
if (!list) {
return [];
}
return list.map(
(item) =>
({
...item.data,
timestamp: item.timestamp,
} as ILog),
);
});
}, [data?.pages]);
const loadMoreLogs = useCallback(() => {
if (hasNextPage && !isFetchingNextPage) {
fetchNextPage();
}
}, [hasNextPage, isFetchingNextPage, fetchNextPage]);
return {
logs,
isLoading,
isFetching,
isFetchingNextPage,
isError,
hasNextPage: !!hasNextPage,
loadMoreLogs,
};
}

View File

@@ -48,6 +48,7 @@ import DashboardEmptyState from './DashboardEmptyState/DashboardEmptyState';
import GridCard from './GridCard';
import { Card, CardContainer, ReactGridLayout } from './styles';
import {
applyRowCollapse,
hasColumnWidthsChanged,
removeUndefinedValuesFromLayout,
} from './utils';
@@ -268,13 +269,10 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
return;
}
currentWidget.title = newTitle;
const updatedWidgets = selectedDashboard?.data?.widgets?.filter(
(e) => e.id !== currentSelectRowId,
const updatedWidgets = selectedDashboard?.data?.widgets?.map((e) =>
e.id === currentSelectRowId ? { ...e, title: newTitle } : e,
);
updatedWidgets?.push(currentWidget);
const updatedSelectedDashboard: Props = {
id: selectedDashboard.id,
data: {
@@ -316,88 +314,13 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
if (!selectedDashboard) {
return;
}
const rowProperties = { ...currentPanelMap[id] };
const updatedPanelMap = { ...currentPanelMap };
let updatedDashboardLayout = [...dashboardLayout];
if (rowProperties.collapsed === true) {
rowProperties.collapsed = false;
const widgetsInsideTheRow = rowProperties.widgets;
let maxY = 0;
widgetsInsideTheRow.forEach((w) => {
maxY = Math.max(maxY, w.y + w.h);
});
const currentRowWidget = dashboardLayout.find((w) => w.i === id);
if (currentRowWidget && widgetsInsideTheRow.length) {
maxY -= currentRowWidget.h + currentRowWidget.y;
}
const idxCurrentRow = dashboardLayout.findIndex((w) => w.i === id);
for (let j = idxCurrentRow + 1; j < dashboardLayout.length; j++) {
updatedDashboardLayout[j].y += maxY;
if (updatedPanelMap[updatedDashboardLayout[j].i]) {
updatedPanelMap[updatedDashboardLayout[j].i].widgets = updatedPanelMap[
updatedDashboardLayout[j].i
].widgets.map((w) => ({
...w,
y: w.y + maxY,
}));
}
}
updatedDashboardLayout = [...updatedDashboardLayout, ...widgetsInsideTheRow];
} else {
rowProperties.collapsed = true;
const currentIdx = dashboardLayout.findIndex((w) => w.i === id);
let widgetsInsideTheRow: Layout[] = [];
let isPanelMapUpdated = false;
for (let j = currentIdx + 1; j < dashboardLayout.length; j++) {
if (currentPanelMap[dashboardLayout[j].i]) {
rowProperties.widgets = widgetsInsideTheRow;
widgetsInsideTheRow = [];
isPanelMapUpdated = true;
break;
} else {
widgetsInsideTheRow.push(dashboardLayout[j]);
}
}
if (!isPanelMapUpdated) {
rowProperties.widgets = widgetsInsideTheRow;
}
let maxY = 0;
widgetsInsideTheRow.forEach((w) => {
maxY = Math.max(maxY, w.y + w.h);
});
const currentRowWidget = dashboardLayout[currentIdx];
if (currentRowWidget && widgetsInsideTheRow.length) {
maxY -= currentRowWidget.h + currentRowWidget.y;
}
for (let j = currentIdx + 1; j < updatedDashboardLayout.length; j++) {
updatedDashboardLayout[j].y += maxY;
if (updatedPanelMap[updatedDashboardLayout[j].i]) {
updatedPanelMap[updatedDashboardLayout[j].i].widgets = updatedPanelMap[
updatedDashboardLayout[j].i
].widgets.map((w) => ({
...w,
y: w.y + maxY,
}));
}
}
updatedDashboardLayout = updatedDashboardLayout.filter(
(widget) => !rowProperties.widgets.some((w: Layout) => w.i === widget.i),
);
}
setCurrentPanelMap((prev) => ({
...prev,
...updatedPanelMap,
[id]: {
...rowProperties,
},
}));
setDashboardLayout(sortLayout(updatedDashboardLayout));
const { updatedLayout, updatedPanelMap } = applyRowCollapse(
id,
dashboardLayout,
currentPanelMap,
);
setCurrentPanelMap((prev) => ({ ...prev, ...updatedPanelMap }));
setDashboardLayout(sortLayout(updatedLayout));
};
const handleDragStop: ItemCallback = (_, oldItem, newItem): void => {

View File

@@ -0,0 +1,181 @@
import { Layout } from 'react-grid-layout';
import { applyRowCollapse, PanelMap } from '../utils';
// Helper to produce deeply-frozen objects that mimic what zustand/immer returns.
// Round-trips through JSON so every nested object/array (and the root) is a
// fresh, frozen copy of the input.
function freeze<T>(obj: T): T {
	const freezeObjects = (_key: string, value: unknown): unknown => {
		if (value !== null && typeof value === 'object') {
			return Object.freeze(value);
		}
		return value;
	};
	return JSON.parse(JSON.stringify(obj), freezeObjects) as T;
}
// ─── fixtures ────────────────────────────────────────────────────────────────
const ROW_ID = 'row1';

/** A layout with one row followed by two widgets. */
function makeLayout(): Layout[] {
	const rowHeader: Layout = { i: ROW_ID, x: 0, y: 0, w: 12, h: 1 };
	const leftWidget: Layout = { i: 'w1', x: 0, y: 1, w: 6, h: 4 };
	const rightWidget: Layout = { i: 'w2', x: 6, y: 1, w: 6, h: 4 };
	return [rowHeader, leftWidget, rightWidget];
}

/** panelMap where the row is expanded (collapsed = false, widgets = []). */
function makeExpandedPanelMap(): PanelMap {
	return { [ROW_ID]: { collapsed: false, widgets: [] } };
}

/** panelMap where the row is collapsed (widgets stored inside). */
function makeCollapsedPanelMap(): PanelMap {
	const storedWidgets: Layout[] = [
		{ i: 'w1', x: 0, y: 1, w: 6, h: 4 },
		{ i: 'w2', x: 6, y: 1, w: 6, h: 4 },
	];
	return { [ROW_ID]: { collapsed: true, widgets: storedWidgets } };
}
// ─── frozen-input guard (regression for zustand/immer read-only bug) ──────────
describe('applyRowCollapse does not mutate frozen inputs', () => {
	it('does not throw when collapsing a row with frozen layout + panelMap', () => {
		const collapse = () =>
			applyRowCollapse(ROW_ID, freeze(makeLayout()), freeze(makeExpandedPanelMap()));
		expect(collapse).not.toThrow();
	});

	it('does not throw when expanding a row with frozen layout + panelMap', () => {
		// Collapsed layout only has the row item; widgets live in panelMap.
		const collapsedLayout = freeze([{ i: ROW_ID, x: 0, y: 0, w: 12, h: 1 }]);
		const expand = () =>
			applyRowCollapse(ROW_ID, collapsedLayout, freeze(makeCollapsedPanelMap()));
		expect(expand).not.toThrow();
	});

	it('leaves the original layout array untouched after collapse', () => {
		const layout = makeLayout();
		const yBefore = layout[1].y; // w1.y before collapse
		applyRowCollapse(ROW_ID, layout, makeExpandedPanelMap());
		expect(layout[1].y).toBe(yBefore);
	});

	it('leaves the original panelMap untouched after collapse', () => {
		const panelMap = makeExpandedPanelMap();
		applyRowCollapse(ROW_ID, makeLayout(), panelMap);
		expect(panelMap[ROW_ID].collapsed).toBe(false);
	});
});
// ─── collapse behaviour ───────────────────────────────────────────────────────
describe('applyRowCollapse collapsing a row', () => {
	// Shared helper: toggle the expanded fixture row into its collapsed state.
	const collapseRow = () =>
		applyRowCollapse(ROW_ID, makeLayout(), makeExpandedPanelMap());

	it('sets collapsed = true on the row entry', () => {
		expect(collapseRow().updatedPanelMap[ROW_ID].collapsed).toBe(true);
	});

	it('stores the child widgets inside the panelMap entry', () => {
		const widgetIds = collapseRow().updatedPanelMap[ROW_ID].widgets.map(
			(w) => w.i,
		);
		expect(widgetIds).toContain('w1');
		expect(widgetIds).toContain('w2');
	});

	it('removes child widgets from the returned layout', () => {
		const layoutIds = collapseRow().updatedLayout.map((l) => l.i);
		expect(layoutIds).not.toContain('w1');
		expect(layoutIds).not.toContain('w2');
		expect(layoutIds).toContain(ROW_ID);
	});
});
// ─── expand behaviour ─────────────────────────────────────────────────────────
describe('applyRowCollapse expanding a row', () => {
	it('sets collapsed = false on the row entry', () => {
		// Collapsed state: only the row item remains in the live layout.
		const collapsedLayout: Layout[] = [{ i: ROW_ID, x: 0, y: 0, w: 12, h: 1 }];
		const { updatedPanelMap } = applyRowCollapse(
			ROW_ID,
			collapsedLayout,
			makeCollapsedPanelMap(),
		);
		expect(updatedPanelMap[ROW_ID].collapsed).toBe(false);
	});
	it('restores child widgets to the returned layout', () => {
		const collapsedLayout: Layout[] = [{ i: ROW_ID, x: 0, y: 0, w: 12, h: 1 }];
		const { updatedLayout } = applyRowCollapse(
			ROW_ID,
			collapsedLayout,
			makeCollapsedPanelMap(),
		);
		const ids = updatedLayout.map((l) => l.i);
		expect(ids).toContain('w1');
		expect(ids).toContain('w2');
	});
	it('restored child widgets appear in both the layout and the panelMap entry', () => {
		const collapsedLayout: Layout[] = [{ i: ROW_ID, x: 0, y: 0, w: 12, h: 1 }];
		const { updatedLayout, updatedPanelMap } = applyRowCollapse(
			ROW_ID,
			collapsedLayout,
			makeCollapsedPanelMap(),
		);
		// The previously-stored widgets should now be back in the live layout.
		expect(updatedLayout.map((l) => l.i)).toContain('w1');
		// The panelMap entry still holds a reference to them (stale until next collapse).
		expect(updatedPanelMap[ROW_ID].widgets.map((w) => w.i)).toContain('w1');
	});
});
// ─── y-offset adjustment ──────────────────────────────────────────────────────
describe('applyRowCollapse y-offset adjustments for rows below', () => {
	it('shifts items below a second row down when the first row expands', () => {
		const ROW2 = 'row2';
		// Layout: row1 (y=0,h=1) | w1 (y=1,h=4) | row2 (y=5,h=1) | w3 (y=6,h=2)
		const layout: Layout[] = [
			{ i: ROW_ID, x: 0, y: 0, w: 12, h: 1 },
			{ i: 'w1', x: 0, y: 1, w: 12, h: 4 },
			{ i: ROW2, x: 0, y: 5, w: 12, h: 1 },
			{ i: 'w3', x: 0, y: 6, w: 12, h: 2 },
		];
		// row1 is collapsed and owns w1; row2 is expanded with no stored widgets.
		const panelMap: PanelMap = {
			[ROW_ID]: {
				collapsed: true,
				widgets: [{ i: 'w1', x: 0, y: 1, w: 12, h: 4 }],
			},
			[ROW2]: { collapsed: false, widgets: [] },
		};
		// Expanding row1 should push row2 and w3 down by the height of w1 (4).
		const collapsedLayout = layout.filter((l) => l.i !== 'w1');
		const { updatedLayout } = applyRowCollapse(ROW_ID, collapsedLayout, panelMap);
		const row2Item = updatedLayout.find((l) => l.i === ROW2);
		expect(row2Item?.y).toBe(5 + 4); // shifted by maxY = 4
	});
});

View File

@@ -4,6 +4,122 @@ import { isEmpty, isEqual } from 'lodash-es';
import { Dashboard, Widgets } from 'types/api/dashboard/getAll';
import { IBuilderQuery, Query } from 'types/api/queryBuilder/queryBuilderData';
export type PanelMap = Record<
string,
{ widgets: Layout[]; collapsed: boolean }
>;
export interface RowCollapseResult {
updatedLayout: Layout[];
updatedPanelMap: PanelMap;
}
/**
 * Pure function that computes the new layout and panelMap after toggling a
 * row's collapsed state. All inputs are treated as immutable — no input object
 * is mutated, so it is safe to pass frozen objects from the zustand store.
 *
 * Returns the post-toggle layout (child widgets removed on collapse, restored
 * on expand, with items below shifted by the row's content height) plus a new
 * panelMap whose entry for `id` carries the flipped `collapsed` flag.
 */
// eslint-disable-next-line sonarjs/cognitive-complexity
export function applyRowCollapse(
	id: string,
	dashboardLayout: Layout[],
	currentPanelMap: PanelMap,
): RowCollapseResult {
	// Copy the row's entry (and clone its widgets array) so we can mutate our
	// local copy without touching the possibly-frozen input. Note: the spread is
	// shallow; only the widgets array itself is cloned.
	const rowProperties = {
		...currentPanelMap[id],
		widgets: [...(currentPanelMap[id]?.widgets ?? [])],
	};
	// Shallow-copy each entry's widgets array so inner .map() calls are safe.
	const updatedPanelMap: PanelMap = Object.fromEntries(
		Object.entries(currentPanelMap).map(([k, v]) => [
			k,
			{ ...v, widgets: [...v.widgets] },
		]),
	);
	let updatedDashboardLayout = [...dashboardLayout];
	if (rowProperties.collapsed === true) {
		// ── EXPAND ──────────────────────────────────────────────────────────────
		rowProperties.collapsed = false;
		const widgetsInsideTheRow = rowProperties.widgets;
		// maxY = bottom edge of the row's lowest stored widget…
		let maxY = 0;
		widgetsInsideTheRow.forEach((w) => {
			maxY = Math.max(maxY, w.y + w.h);
		});
		const currentRowWidget = dashboardLayout.find((w) => w.i === id);
		if (currentRowWidget && widgetsInsideTheRow.length) {
			// …minus the row header's own extent, giving the net downward shift.
			maxY -= currentRowWidget.h + currentRowWidget.y;
		}
		const idxCurrentRow = dashboardLayout.findIndex((w) => w.i === id);
		// Push every item below the row down by maxY, including widgets stored
		// inside other collapsed rows (their panelMap entries are shifted too).
		for (let j = idxCurrentRow + 1; j < dashboardLayout.length; j++) {
			updatedDashboardLayout[j] = {
				...updatedDashboardLayout[j],
				y: updatedDashboardLayout[j].y + maxY,
			};
			if (updatedPanelMap[updatedDashboardLayout[j].i]) {
				updatedPanelMap[updatedDashboardLayout[j].i].widgets = updatedPanelMap[
					updatedDashboardLayout[j].i
				].widgets.map((w) => ({ ...w, y: w.y + maxY }));
			}
		}
		updatedDashboardLayout = [...updatedDashboardLayout, ...widgetsInsideTheRow];
	} else {
		// ── COLLAPSE ─────────────────────────────────────────────────────────────
		rowProperties.collapsed = true;
		const currentIdx = dashboardLayout.findIndex((w) => w.i === id);
		// Collect everything between this row and the next row marker; those
		// items become the widgets stored inside the collapsed row.
		let widgetsInsideTheRow: Layout[] = [];
		let isPanelMapUpdated = false;
		for (let j = currentIdx + 1; j < dashboardLayout.length; j++) {
			if (currentPanelMap[dashboardLayout[j].i]) {
				// NOTE(review): widgetsInsideTheRow is cleared here, so when a later
				// row marker exists the maxY shift below is 0 — confirm intended.
				rowProperties.widgets = widgetsInsideTheRow;
				widgetsInsideTheRow = [];
				isPanelMapUpdated = true;
				break;
			} else {
				widgetsInsideTheRow.push(dashboardLayout[j]);
			}
		}
		if (!isPanelMapUpdated) {
			// No later row marker: everything to the end belongs to this row.
			rowProperties.widgets = widgetsInsideTheRow;
		}
		let maxY = 0;
		widgetsInsideTheRow.forEach((w) => {
			maxY = Math.max(maxY, w.y + w.h);
		});
		const currentRowWidget = dashboardLayout[currentIdx];
		if (currentRowWidget && widgetsInsideTheRow.length) {
			maxY -= currentRowWidget.h + currentRowWidget.y;
		}
		// Shift items below by maxY (pulls them up when maxY is negative).
		for (let j = currentIdx + 1; j < updatedDashboardLayout.length; j++) {
			updatedDashboardLayout[j] = {
				...updatedDashboardLayout[j],
				y: updatedDashboardLayout[j].y + maxY,
			};
			if (updatedPanelMap[updatedDashboardLayout[j].i]) {
				updatedPanelMap[updatedDashboardLayout[j].i].widgets = updatedPanelMap[
					updatedDashboardLayout[j].i
				].widgets.map((w) => ({ ...w, y: w.y + maxY }));
			}
		}
		// Drop the now-hidden child widgets from the live layout.
		updatedDashboardLayout = updatedDashboardLayout.filter(
			(widget) => !rowProperties.widgets.some((w: Layout) => w.i === widget.i),
		);
	}
	updatedPanelMap[id] = { ...rowProperties };
	return { updatedLayout: updatedDashboardLayout, updatedPanelMap };
}
export const removeUndefinedValuesFromLayout = (layout: Layout[]): Layout[] =>
layout.map((obj) =>
Object.fromEntries(

View File

@@ -89,7 +89,7 @@ function HostsList(): JSX.Element {
...baseQuery,
limit: pageSize,
offset: (currentPage - 1) * pageSize,
filters: filters?.items?.length ? filters : undefined,
filters,
start: Math.floor(minTime / 1000000),
end: Math.floor(maxTime / 1000000),
orderBy,
@@ -97,6 +97,15 @@ function HostsList(): JSX.Element {
}, [pageSize, currentPage, filters, minTime, maxTime, orderBy]);
const queryKey = useMemo(() => {
if (selectedHostName) {
return [
'hostList',
String(pageSize),
String(currentPage),
JSON.stringify(filters),
JSON.stringify(orderBy),
];
}
return [
'hostList',
String(pageSize),
@@ -106,7 +115,15 @@ function HostsList(): JSX.Element {
String(minTime),
String(maxTime),
];
}, [pageSize, currentPage, filters, orderBy, minTime, maxTime]);
}, [
pageSize,
currentPage,
filters,
orderBy,
selectedHostName,
minTime,
maxTime,
]);
const { data, isFetching, isLoading, isError } = useGetHostList(
query as HostListPayload,

View File

@@ -4,7 +4,6 @@ import { Provider } from 'react-redux';
import { MemoryRouter } from 'react-router-dom';
import { render } from '@testing-library/react';
import * as useGetHostListHooks from 'hooks/infraMonitoring/useGetHostList';
import { NuqsTestingAdapter } from 'nuqs/adapters/testing';
import * as appContextHooks from 'providers/App/App';
import * as timezoneHooks from 'providers/Timezone';
import store from 'store';
@@ -131,30 +130,26 @@ jest.spyOn(appContextHooks, 'useAppContext').mockReturnValue({
describe('HostsList', () => {
it('renders hosts list table', () => {
const { container } = render(
<NuqsTestingAdapter>
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<HostsList />
</Provider>
</MemoryRouter>
</QueryClientProvider>
</NuqsTestingAdapter>,
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<HostsList />
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
expect(container.querySelector('.hosts-list-table')).toBeInTheDocument();
});
it('renders filters', () => {
const { container } = render(
<NuqsTestingAdapter>
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<HostsList />
</Provider>
</MemoryRouter>
</QueryClientProvider>
</NuqsTestingAdapter>,
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<HostsList />
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
expect(container.querySelector('.filters')).toBeInTheDocument();
});

View File

@@ -92,7 +92,10 @@ function LogsActionsContainer({
/>
</div>
<div className="download-options-container">
<DownloadOptionsMenu dataSource={DataSource.LOGS} />
<DownloadOptionsMenu
dataSource={DataSource.LOGS}
selectedColumns={options?.selectColumns}
/>
</div>
<div className="format-options-container">
<LogsFormatOptionsMenu

View File

@@ -42,8 +42,15 @@ function LogsPanelComponent({
setPageSize(value);
setOffset(0);
setRequestData((prev) => {
const newQueryData = { ...prev.query };
newQueryData.builder.queryData[0].pageSize = value;
const newQueryData = {
...prev.query,
builder: {
...prev.query.builder,
queryData: prev.query.builder.queryData.map((qd, i) =>
i === 0 ? { ...qd, pageSize: value } : qd,
),
},
};
return {
...prev,
query: newQueryData,

View File

@@ -42,11 +42,19 @@ function Panel({
};
}
updatedQuery.builder.queryData[0].pageSize = 10;
const initialDataSource = updatedQuery.builder.queryData[0].dataSource;
const updatedQueryForList = {
...updatedQuery,
builder: {
...updatedQuery.builder,
queryData: updatedQuery.builder.queryData.map((qd, i) =>
i === 0 ? { ...qd, pageSize: 10 } : qd,
),
},
};
return {
query: updatedQuery,
query: updatedQueryForList,
graphType: PANEL_TYPES.LIST,
selectedTime: widget.timePreferance || 'GLOBAL_TIME',
tableParams: {

View File

@@ -239,7 +239,10 @@ function ListView({
/>
</div>
<DownloadOptionsMenu dataSource={DataSource.TRACES} />
<DownloadOptionsMenu
dataSource={DataSource.TRACES}
selectedColumns={options?.selectColumns}
/>
<TraceExplorerControls
isLoading={isFetching}

View File

@@ -52,37 +52,44 @@ export const useGetQueryRange: UseGetQueryRange = (
!firstQueryData?.filters?.items.some((filter) => filter.key?.key === 'id') &&
firstQueryData?.orderBy[0].columnName === 'timestamp';
const modifiedRequestData = {
if (
isListWithSingleTimestampOrder &&
firstQueryData?.dataSource === DataSource.LOGS
) {
return {
...requestData,
graphType:
requestData.graphType === PANEL_TYPES.BAR
? PANEL_TYPES.TIME_SERIES
: requestData.graphType,
query: {
...requestData.query,
builder: {
...requestData.query.builder,
queryData: [
{
...firstQueryData,
orderBy: [
...(firstQueryData?.orderBy || []),
{
columnName: 'id',
order: firstQueryData?.orderBy[0]?.order,
},
],
},
],
},
},
};
}
return {
...requestData,
graphType:
requestData.graphType === PANEL_TYPES.BAR
? PANEL_TYPES.TIME_SERIES
: requestData.graphType,
};
// If the query is a list with a single timestamp order, we need to add the id column to the order by clause
if (
isListWithSingleTimestampOrder &&
firstQueryData?.dataSource === DataSource.LOGS
) {
modifiedRequestData.query.builder = {
...requestData.query.builder,
queryData: [
{
...firstQueryData,
orderBy: [
...(firstQueryData?.orderBy || []),
{
columnName: 'id',
order: firstQueryData?.orderBy[0]?.order,
},
],
},
],
};
}
return modifiedRequestData;
}, [requestData]);
const queryKey = useMemo(() => {

View File

@@ -3,7 +3,7 @@ import { useCallback, useState } from 'react';
import { useSelector } from 'react-redux';
import { message } from 'antd';
import { downloadExportData } from 'api/v1/download/downloadExportData';
import { prepareQueryRangePayloadV5 } from 'api/v5/v5';
import { prepareQueryRangePayloadV5, TelemetryFieldKey } from 'api/v5/v5';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { AppState } from 'store/reducers';
@@ -14,6 +14,7 @@ interface ExportOptions {
format: string;
rowLimit: number;
clearSelectColumns: boolean;
selectedColumns?: TelemetryFieldKey[];
}
interface UseExportRawDataProps {
@@ -42,6 +43,7 @@ export function useExportRawData({
format,
rowLimit,
clearSelectColumns,
selectedColumns,
}: ExportOptions): Promise<void> => {
if (!stagedQuery) {
return;
@@ -50,6 +52,12 @@ export function useExportRawData({
try {
setIsDownloading(true);
const selectColumnsOverride = clearSelectColumns
? {}
: selectedColumns?.length
? { selectColumns: selectedColumns }
: {};
const exportQuery = {
...stagedQuery,
builder: {
@@ -59,7 +67,7 @@ export function useExportRawData({
groupBy: [],
having: { expression: '' },
limit: rowLimit,
...(clearSelectColumns && { selectColumns: [] }),
...selectColumnsOverride,
})),
queryTraceOperator: (stagedQuery.builder.queryTraceOperator || []).map(
(traceOp) => ({
@@ -67,7 +75,7 @@ export function useExportRawData({
groupBy: [],
having: { expression: '' },
limit: rowLimit,
...(clearSelectColumns && { selectColumns: [] }),
...selectColumnsOverride,
}),
),
},

View File

@@ -2,6 +2,7 @@ package rules
import (
"context"
"encoding/json"
"fmt"
"log/slog"
"sync"
@@ -190,6 +191,24 @@ func NewBaseRule(id string, orgID valuer.UUID, p *ruletypes.PostableRule, reader
return baseRule, nil
}
// String renders the rule as the JSON encoding of its PostableRule projection.
// If marshaling fails, the error text is returned instead of JSON.
func (r *BaseRule) String() string {
	postable := ruletypes.PostableRule{
		AlertName:         r.name,
		RuleCondition:     r.ruleCondition,
		EvalWindow:        r.evalWindow,
		Labels:            r.labels.Map(),
		Annotations:       r.annotations.Map(),
		PreferredChannels: r.preferredChannels,
	}
	encoded, err := json.Marshal(postable)
	if err == nil {
		return string(encoded)
	}
	return fmt.Sprintf("error marshaling alerting rule: %s", err.Error())
}
func (r *BaseRule) matchType() ruletypes.MatchType {
if r.ruleCondition == nil {
return ruletypes.AtleastOnce

View File

@@ -0,0 +1,239 @@
package rules
import (
"context"
"encoding/json"
"fmt"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/query-service/utils/times"
"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/units"
)
// EvalVectorOptions customizes how BaseRule.EvalVector maps a result vector
// onto alert labels and annotations.
type EvalVectorOptions struct {
	// DeleteLabels are label names dropped from each sample's labelset before
	// the alert labels and result labels are built.
	DeleteLabels []string
	// ExtraAnnotations, when non-nil, is invoked once per sample; its result is
	// appended to that sample's alert annotations.
	ExtraAnnotations func(ctx context.Context, ts time.Time, metric labels.Labels) []labels.Label
}
// EvalVector turns one evaluation round's result vector into alert state:
// it builds/updates active alerts, transitions pending→firing,
// firing↔recovering and firing→inactive, records state-history rows, and
// returns the number of currently active alerts. The rule's mutex is held for
// the whole call; health/lastError are written directly (not via setters) to
// avoid re-locking.
func (r *BaseRule) EvalVector(ctx context.Context, ts time.Time, res ruletypes.Vector, opts EvalVectorOptions) (int, error) {
	prevState := r.State()

	valueFormatter := units.FormatterFromUnit(r.Unit())

	r.mtx.Lock()
	defer r.mtx.Unlock()

	// Fingerprints of every labelset produced in this round; anything in
	// r.Active missing from this set is a candidate for resolution below.
	resultFPs := map[uint64]struct{}{}
	alerts := make(map[uint64]*ruletypes.Alert, len(res))

	// threshold name -> notification channels, used to resolve alert receivers.
	ruleReceivers := r.Threshold.GetRuleReceivers()
	ruleReceiverMap := make(map[string][]string)
	for _, value := range ruleReceivers {
		ruleReceiverMap[value.Name] = value.Channels
	}

	for _, smpl := range res {
		l := make(map[string]string, len(smpl.Metric))
		for _, lbl := range smpl.Metric {
			l[lbl.Name] = lbl.Value
		}

		r.logger.DebugContext(ctx, "alerting for series", "rule_name", r.Name(), "series", smpl)

		value := valueFormatter.Format(smpl.V, r.Unit())
		threshold := valueFormatter.Format(smpl.Target, smpl.TargetUnit)
		r.logger.DebugContext(ctx, "Alert template data for rule", "rule_name", r.Name(), "formatter", valueFormatter.Name(), "value", value, "threshold", threshold)

		tmplData := ruletypes.AlertTemplateData(l, value, threshold)
		// Inject some convenience variables that are easier to remember for users
		// who are not used to Go's templating system.
		defs := "{{$labels := .Labels}}{{$value := .Value}}{{$threshold := .Threshold}}"

		// utility function to apply go template on labels and annotations;
		// template errors are logged and substituted into the output string.
		expand := func(text string) string {
			tmpl := ruletypes.NewTemplateExpander(
				ctx,
				defs+text,
				"__alert_"+r.Name(),
				tmplData,
				times.Time(timestamp.FromTime(ts)),
				nil,
			)
			result, err := tmpl.Expand()
			if err != nil {
				result = fmt.Sprintf("<error expanding template: %s>", err)
				r.logger.ErrorContext(ctx, "Expanding alert template failed", "rule_name", r.Name(), errors.Attr(err), "data", tmplData)
			}
			return result
		}

		// NOTE(review): the trailing no-arg .Del() appears to be a no-op — confirm intent.
		lb := labels.NewBuilder(smpl.Metric).Del(opts.DeleteLabels...).Del()
		resultLabels := labels.NewBuilder(smpl.Metric).Del(opts.DeleteLabels...).Labels()

		// Rule-configured labels are template-expanded per sample.
		for name, value := range r.labels.Map() {
			lb.Set(name, expand(value))
		}

		lb.Set(labels.AlertNameLabel, r.Name())
		lb.Set(labels.AlertRuleIdLabel, r.ID())
		lb.Set(labels.RuleSourceLabel, r.GeneratorURL())

		annotations := make(labels.Labels, 0, len(r.annotations.Map()))
		for name, value := range r.annotations.Map() {
			annotations = append(annotations, labels.Label{Name: name, Value: expand(value)})
		}
		// Missing-data samples get a distinct alert name and a marker label.
		if smpl.IsMissing {
			lb.Set(labels.AlertNameLabel, "[No data] "+r.Name())
			lb.Set(labels.NoDataLabel, "true")
		}

		if opts.ExtraAnnotations != nil {
			extra := opts.ExtraAnnotations(ctx, ts, smpl.Metric)
			annotations = append(annotations, extra...)
		}

		lbs := lb.Labels()
		h := lbs.Hash()
		resultFPs[h] = struct{}{}

		// Two samples hashing to the same labelset is a hard error: the alert
		// identity would be ambiguous.
		if _, ok := alerts[h]; ok {
			r.logger.ErrorContext(ctx, "the alert query returns duplicate records", "rule_id", r.ID(), "alert", alerts[h])
			err := fmt.Errorf("duplicate alert found, vector contains metrics with the same labelset after applying alert labels")
			// We have already acquired the lock above hence using SetHealth and SetLastError will deadlock.
			r.health = ruletypes.HealthBad
			r.lastError = err
			return 0, err
		}

		alerts[h] = &ruletypes.Alert{
			Labels:            lbs,
			QueryResultLables: resultLabels,
			Annotations:       annotations,
			ActiveAt:          ts,
			State:             model.StatePending,
			Value:             smpl.V,
			GeneratorURL:      r.GeneratorURL(),
			Receivers:         ruleReceiverMap[lbs.Map()[ruletypes.LabelThresholdName]],
			Missing:           smpl.IsMissing,
			IsRecovering:      smpl.IsRecovering,
		}
	}

	r.logger.InfoContext(ctx, "number of alerts found", "rule_name", r.Name(), "alerts_count", len(alerts))

	// alerts[h] is ready, add or update active list now
	for h, a := range alerts {
		// Check whether we already have alerting state for the identifying label set.
		// Update the last value and annotations if so, create a new alert entry otherwise.
		if alert, ok := r.Active[h]; ok && alert.State != model.StateInactive {
			alert.Value = a.Value
			alert.Annotations = a.Annotations
			// Update the recovering and missing state of existing alert
			alert.IsRecovering = a.IsRecovering
			alert.Missing = a.Missing
			if v, ok := alert.Labels.Map()[ruletypes.LabelThresholdName]; ok {
				alert.Receivers = ruleReceiverMap[v]
			}
			continue
		}
		r.Active[h] = a
	}

	itemsToAdd := []model.RuleStateHistory{}

	// Check if any pending alerts should be removed or fire now. Write out alert timeseries.
	for fp, a := range r.Active {
		labelsJSON, err := json.Marshal(a.QueryResultLables)
		if err != nil {
			// Best-effort: a marshal failure only degrades the history row's labels.
			r.logger.ErrorContext(ctx, "error marshaling labels", errors.Attr(err), "rule_name", r.Name(), "labels", a.Labels)
		}
		if _, ok := resultFPs[fp]; !ok {
			// If the alert was previously firing, keep it around for a given
			// retention time so it is reported as resolved to the AlertManager.
			if a.State == model.StatePending || (!a.ResolvedAt.IsZero() && ts.Sub(a.ResolvedAt) > ruletypes.ResolvedRetention) {
				delete(r.Active, fp)
			}
			if a.State != model.StateInactive {
				r.logger.DebugContext(ctx, "converting firing alert to inActive", "name", r.Name())
				a.State = model.StateInactive
				a.ResolvedAt = ts
				itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
					RuleID:       r.ID(),
					RuleName:     r.Name(),
					State:        model.StateInactive,
					StateChanged: true,
					UnixMilli:    ts.UnixMilli(),
					Labels:       model.LabelsString(labelsJSON),
					Fingerprint:  a.QueryResultLables.Hash(),
					Value:        a.Value,
				})
			}
			continue
		}

		// Pending alerts graduate to firing once they have been active for the
		// rule's hold duration; missing-data alerts are recorded as NoData.
		if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration.Duration() {
			r.logger.DebugContext(ctx, "converting pending alert to firing", "name", r.Name())
			a.State = model.StateFiring
			a.FiredAt = ts
			state := model.StateFiring
			if a.Missing {
				state = model.StateNoData
			}
			itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
				RuleID:       r.ID(),
				RuleName:     r.Name(),
				State:        state,
				StateChanged: true,
				UnixMilli:    ts.UnixMilli(),
				Labels:       model.LabelsString(labelsJSON),
				Fingerprint:  a.QueryResultLables.Hash(),
				Value:        a.Value,
			})
		}

		// We need to change firing alert to recovering if the returned sample meets recovery threshold
		changeFiringToRecovering := a.State == model.StateFiring && a.IsRecovering
		// We need to change recovering alerts to firing if the returned sample meets target threshold
		changeRecoveringToFiring := a.State == model.StateRecovering && !a.IsRecovering && !a.Missing

		// in any of the above case we need to update the status of alert
		if changeFiringToRecovering || changeRecoveringToFiring {
			state := model.StateRecovering
			if changeRecoveringToFiring {
				state = model.StateFiring
			}
			a.State = state
			r.logger.DebugContext(ctx, "converting alert state", "name", r.Name(), "state", state)
			itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
				RuleID:       r.ID(),
				RuleName:     r.Name(),
				State:        state,
				StateChanged: true,
				UnixMilli:    ts.UnixMilli(),
				Labels:       model.LabelsString(labelsJSON),
				Fingerprint:  a.QueryResultLables.Hash(),
				Value:        a.Value,
			})
		}
	}

	currentState := r.State()

	// Stamp each history row with whether the rule's overall state flipped.
	overallStateChanged := currentState != prevState
	for idx, item := range itemsToAdd {
		item.OverallStateChanged = overallStateChanged
		item.OverallState = currentState
		itemsToAdd[idx] = item
	}

	r.RecordRuleStateHistory(ctx, prevState, currentState, itemsToAdd)

	r.health = ruletypes.HealthGood
	r.lastError = nil

	return len(r.Active), nil
}

View File

@@ -2,25 +2,20 @@ package rules
import (
"context"
"encoding/json"
"fmt"
"log/slog"
"time"
"github.com/prometheus/prometheus/model/labels"
plabels "github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/promql"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/prometheus"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
qslabels "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/query-service/utils/times"
"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/units"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -186,232 +181,16 @@ func (r *PromRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletype
}
func (r *PromRule) Eval(ctx context.Context, ts time.Time) (int, error) {
prevState := r.State()
valueFormatter := units.FormatterFromUnit(r.Unit())
// prepare query, run query get data and filter the data based on the threshold
results, err := r.buildAndRunQuery(ctx, ts)
res, err := r.buildAndRunQuery(ctx, ts)
if err != nil {
return 0, err
}
r.mtx.Lock()
defer r.mtx.Unlock()
resultFPs := map[uint64]struct{}{}
alerts := make(map[uint64]*ruletypes.Alert, len(results))
ruleReceivers := r.Threshold.GetRuleReceivers()
ruleReceiverMap := make(map[string][]string)
for _, value := range ruleReceivers {
ruleReceiverMap[value.Name] = value.Channels
opts := EvalVectorOptions{
DeleteLabels: []string{labels.MetricNameLabel},
}
for _, result := range results {
l := make(map[string]string, len(result.Metric))
for _, lbl := range result.Metric {
l[lbl.Name] = lbl.Value
}
r.logger.DebugContext(ctx, "alerting for series", "rule_name", r.Name(), "series", result)
threshold := valueFormatter.Format(result.Target, result.TargetUnit)
tmplData := ruletypes.AlertTemplateData(l, valueFormatter.Format(result.V, r.Unit()), threshold)
// Inject some convenience variables that are easier to remember for users
// who are not used to Go's templating system.
defs := "{{$labels := .Labels}}{{$value := .Value}}{{$threshold := .Threshold}}"
expand := func(text string) string {
tmpl := ruletypes.NewTemplateExpander(
ctx,
defs+text,
"__alert_"+r.Name(),
tmplData,
times.Time(timestamp.FromTime(ts)),
nil,
)
result, err := tmpl.Expand()
if err != nil {
result = fmt.Sprintf("<error expanding template: %s>", err)
r.logger.WarnContext(ctx, "Expanding alert template failed", "rule_name", r.Name(), errors.Attr(err), "data", tmplData)
}
return result
}
lb := qslabels.NewBuilder(result.Metric).Del(qslabels.MetricNameLabel)
resultLabels := qslabels.NewBuilder(result.Metric).Del(qslabels.MetricNameLabel).Labels()
for name, value := range r.labels.Map() {
lb.Set(name, expand(value))
}
lb.Set(qslabels.AlertNameLabel, r.Name())
lb.Set(qslabels.AlertRuleIdLabel, r.ID())
lb.Set(qslabels.RuleSourceLabel, r.GeneratorURL())
annotations := make(qslabels.Labels, 0, len(r.annotations.Map()))
for name, value := range r.annotations.Map() {
annotations = append(annotations, qslabels.Label{Name: name, Value: expand(value)})
}
if result.IsMissing {
lb.Set(qslabels.AlertNameLabel, "[No data] "+r.Name())
lb.Set(qslabels.NoDataLabel, "true")
}
lbs := lb.Labels()
h := lbs.Hash()
resultFPs[h] = struct{}{}
if _, ok := alerts[h]; ok {
err = fmt.Errorf("vector contains metrics with the same labelset after applying alert labels")
// We have already acquired the lock above hence using SetHealth and
// SetLastError will deadlock.
r.health = ruletypes.HealthBad
r.lastError = err
return 0, err
}
alerts[h] = &ruletypes.Alert{
Labels: lbs,
QueryResultLables: resultLabels,
Annotations: annotations,
ActiveAt: ts,
State: model.StatePending,
Value: result.V,
GeneratorURL: r.GeneratorURL(),
Receivers: ruleReceiverMap[lbs.Map()[ruletypes.LabelThresholdName]],
Missing: result.IsMissing,
IsRecovering: result.IsRecovering,
}
}
r.logger.InfoContext(ctx, "number of alerts found", "rule_name", r.Name(), "alerts_count", len(alerts))
// alerts[h] is ready, add or update active list now
for h, a := range alerts {
// Check whether we already have alerting state for the identifying label set.
// Update the last value and annotations if so, create a new alert entry otherwise.
if alert, ok := r.Active[h]; ok && alert.State != model.StateInactive {
alert.Value = a.Value
alert.Annotations = a.Annotations
// Update the recovering and missing state of existing alert
alert.IsRecovering = a.IsRecovering
alert.Missing = a.Missing
if v, ok := alert.Labels.Map()[ruletypes.LabelThresholdName]; ok {
alert.Receivers = ruleReceiverMap[v]
}
continue
}
r.Active[h] = a
}
itemsToAdd := []model.RuleStateHistory{}
// Check if any pending alerts should be removed or fire now. Write out alert timeseries.
for fp, a := range r.Active {
labelsJSON, err := json.Marshal(a.QueryResultLables)
if err != nil {
r.logger.ErrorContext(ctx, "error marshaling labels", errors.Attr(err), "rule_name", r.Name())
}
if _, ok := resultFPs[fp]; !ok {
// If the alert was previously firing, keep it around for a given
// retention time so it is reported as resolved to the AlertManager.
if a.State == model.StatePending || (!a.ResolvedAt.IsZero() && ts.Sub(a.ResolvedAt) > ruletypes.ResolvedRetention) {
delete(r.Active, fp)
}
if a.State != model.StateInactive {
a.State = model.StateInactive
a.ResolvedAt = ts
itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
RuleID: r.ID(),
RuleName: r.Name(),
State: model.StateInactive,
StateChanged: true,
UnixMilli: ts.UnixMilli(),
Labels: model.LabelsString(labelsJSON),
Fingerprint: a.QueryResultLables.Hash(),
})
}
continue
}
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration.Duration() {
a.State = model.StateFiring
a.FiredAt = ts
state := model.StateFiring
if a.Missing {
state = model.StateNoData
}
itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
RuleID: r.ID(),
RuleName: r.Name(),
State: state,
StateChanged: true,
UnixMilli: ts.UnixMilli(),
Labels: model.LabelsString(labelsJSON),
Fingerprint: a.QueryResultLables.Hash(),
Value: a.Value,
})
}
// We need to change firing alert to recovering if the returned sample meets recovery threshold
changeAlertingToRecovering := a.State == model.StateFiring && a.IsRecovering
// We need to change recovering alerts to firing if the returned sample meets target threshold
changeRecoveringToFiring := a.State == model.StateRecovering && !a.IsRecovering && !a.Missing
// in any of the above case we need to update the status of alert
if changeAlertingToRecovering || changeRecoveringToFiring {
state := model.StateRecovering
if changeRecoveringToFiring {
state = model.StateFiring
}
a.State = state
r.logger.DebugContext(ctx, "converting alert state", "name", r.Name(), "state", state)
itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
RuleID: r.ID(),
RuleName: r.Name(),
State: state,
StateChanged: true,
UnixMilli: ts.UnixMilli(),
Labels: model.LabelsString(labelsJSON),
Fingerprint: a.QueryResultLables.Hash(),
Value: a.Value,
})
}
}
r.health = ruletypes.HealthGood
r.lastError = err
currentState := r.State()
overallStateChanged := currentState != prevState
for idx, item := range itemsToAdd {
item.OverallStateChanged = overallStateChanged
item.OverallState = currentState
itemsToAdd[idx] = item
}
r.RecordRuleStateHistory(ctx, prevState, currentState, itemsToAdd)
return len(r.Active), nil
}
func (r *PromRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.name,
RuleCondition: r.ruleCondition,
EvalWindow: r.evalWindow,
Labels: r.labels.Map(),
Annotations: r.annotations.Map(),
PreferredChannels: r.preferredChannels,
}
byt, err := json.Marshal(ar)
if err != nil {
return fmt.Sprintf("error marshaling alerting rule: %s", err.Error())
}
return string(byt)
return r.EvalVector(ctx, ts, res, opts)
}
func (r *PromRule) RunAlertQuery(ctx context.Context, qs string, start, end time.Time, interval time.Duration) (promql.Matrix, error) {
@@ -463,7 +242,7 @@ func toCommonSeries(series promql.Series) v3.Series {
Points: make([]v3.Point, 0),
}
series.Metric.Range(func(lbl labels.Label) {
series.Metric.Range(func(lbl plabels.Label) {
commonSeries.Labels[lbl.Name] = lbl.Value
commonSeries.LabelsArray = append(commonSeries.LabelsArray, map[string]string{
lbl.Name: lbl.Value,

View File

@@ -3,7 +3,6 @@ package rules
import (
"bytes"
"context"
"encoding/json"
"fmt"
"log/slog"
"math"
@@ -24,21 +23,16 @@ import (
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
querierV5 "github.com/SigNoz/signoz/pkg/querier"
logsv3 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v3"
"github.com/SigNoz/signoz/pkg/query-service/app/querier"
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
tracesV4 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v4"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
querytemplate "github.com/SigNoz/signoz/pkg/query-service/utils/queryTemplate"
"github.com/SigNoz/signoz/pkg/query-service/utils/times"
"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
logsv3 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v3"
tracesV4 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v4"
"github.com/SigNoz/signoz/pkg/units"
querierV5 "github.com/SigNoz/signoz/pkg/querier"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)
@@ -283,7 +277,7 @@ func (r *ThresholdRule) prepareLinksToTraces(ctx context.Context, ts time.Time,
return contextlinks.PrepareLinksToTraces(start, end, filterItems)
}
func (r *ThresholdRule) prepareQueryRangeV5(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
func (r *ThresholdRule) prepareQueryRangeV5(ctx context.Context, ts time.Time) *qbtypes.QueryRangeRequest {
r.logger.InfoContext(
ctx, "prepare query range request v5", "ts", ts.UnixMilli(), "eval_window", r.evalWindow.Milliseconds(), "eval_delay", r.evalDelay.Milliseconds(),
)
@@ -302,16 +296,13 @@ func (r *ThresholdRule) prepareQueryRangeV5(ctx context.Context, ts time.Time) (
}
req.CompositeQuery.Queries = make([]qbtypes.QueryEnvelope, len(r.Condition().CompositeQuery.Queries))
copy(req.CompositeQuery.Queries, r.Condition().CompositeQuery.Queries)
return req, nil
return req
}
func (r *ThresholdRule) prepareLinksToLogsV5(ctx context.Context, ts time.Time, lbls labels.Labels) string {
selectedQuery := r.GetSelectedQuery()
qr, err := r.prepareQueryRangeV5(ctx, ts)
if err != nil {
return ""
}
qr := r.prepareQueryRangeV5(ctx, ts)
start := time.UnixMilli(int64(qr.Start))
end := time.UnixMilli(int64(qr.End))
@@ -348,10 +339,7 @@ func (r *ThresholdRule) prepareLinksToLogsV5(ctx context.Context, ts time.Time,
func (r *ThresholdRule) prepareLinksToTracesV5(ctx context.Context, ts time.Time, lbls labels.Labels) string {
selectedQuery := r.GetSelectedQuery()
qr, err := r.prepareQueryRangeV5(ctx, ts)
if err != nil {
return ""
}
qr := r.prepareQueryRangeV5(ctx, ts)
start := time.UnixMilli(int64(qr.Start))
end := time.UnixMilli(int64(qr.End))
@@ -500,10 +488,7 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,
}
func (r *ThresholdRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID, ts time.Time) (ruletypes.Vector, error) {
params, err := r.prepareQueryRangeV5(ctx, ts)
if err != nil {
return nil, err
}
params := r.prepareQueryRangeV5(ctx, ts)
var results []*v3.Result
@@ -580,13 +565,8 @@ func (r *ThresholdRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUI
}
func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
prevState := r.State()
valueFormatter := units.FormatterFromUnit(r.Unit())
var res ruletypes.Vector
var err error
if r.version == "v5" {
r.logger.InfoContext(ctx, "running v5 query")
res, err = r.buildAndRunQueryV5(ctx, r.orgID, ts)
@@ -594,247 +574,35 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
r.logger.InfoContext(ctx, "running v4 query")
res, err = r.buildAndRunQuery(ctx, r.orgID, ts)
}
if err != nil {
return 0, err
}
r.mtx.Lock()
defer r.mtx.Unlock()
opts := EvalVectorOptions{
DeleteLabels: []string{labels.MetricNameLabel, labels.TemporalityLabel},
ExtraAnnotations: func(ctx context.Context, ts time.Time, smpl labels.Labels) []labels.Label {
host := r.hostFromSource()
if host == "" {
return nil
}
resultFPs := map[uint64]struct{}{}
alerts := make(map[uint64]*ruletypes.Alert, len(res))
ruleReceivers := r.Threshold.GetRuleReceivers()
ruleReceiverMap := make(map[string][]string)
for _, value := range ruleReceivers {
ruleReceiverMap[value.Name] = value.Channels
//// Links with timestamps should go in annotations since labels
//// is used alert grouping, and we want to group alerts with the same
//// label set, but different timestamps, together.
switch r.typ { // DIFF
case ruletypes.AlertTypeTraces:
if link := r.prepareLinksToTraces(ctx, ts, smpl); link != "" {
r.logger.InfoContext(ctx, "adding traces link to annotations", "link", fmt.Sprintf("%s/traces-explorer?%s", host, link))
return []labels.Label{{Name: "related_traces", Value: fmt.Sprintf("%s/traces-explorer?%s", host, link)}}
}
case ruletypes.AlertTypeLogs:
if link := r.prepareLinksToLogs(ctx, ts, smpl); link != "" {
r.logger.InfoContext(ctx, "adding logs link to annotations", "link", fmt.Sprintf("%s/logs/logs-explorer?%s", host, link))
return []labels.Label{{Name: "related_logs", Value: fmt.Sprintf("%s/logs/logs-explorer?%s", host, link)}}
}
}
return nil
},
}
for _, smpl := range res {
l := make(map[string]string, len(smpl.Metric))
for _, lbl := range smpl.Metric {
l[lbl.Name] = lbl.Value
}
value := valueFormatter.Format(smpl.V, r.Unit())
// todo(aniket): handle different threshold
threshold := valueFormatter.Format(smpl.Target, smpl.TargetUnit)
r.logger.DebugContext(ctx, "Alert template data for rule", "rule_name", r.Name(), "formatter", valueFormatter.Name(), "value", value, "threshold", threshold)
tmplData := ruletypes.AlertTemplateData(l, value, threshold)
// Inject some convenience variables that are easier to remember for users
// who are not used to Go's templating system.
defs := "{{$labels := .Labels}}{{$value := .Value}}{{$threshold := .Threshold}}"
// utility function to apply go template on labels and annotations
expand := func(text string) string {
tmpl := ruletypes.NewTemplateExpander(
ctx,
defs+text,
"__alert_"+r.Name(),
tmplData,
times.Time(timestamp.FromTime(ts)),
nil,
)
result, err := tmpl.Expand()
if err != nil {
result = fmt.Sprintf("<error expanding template: %s>", err)
r.logger.ErrorContext(ctx, "Expanding alert template failed", errors.Attr(err), "data", tmplData)
}
return result
}
lb := labels.NewBuilder(smpl.Metric).Del(labels.MetricNameLabel).Del(labels.TemporalityLabel)
resultLabels := labels.NewBuilder(smpl.Metric).Del(labels.MetricNameLabel).Del(labels.TemporalityLabel).Labels()
for name, value := range r.labels.Map() {
lb.Set(name, expand(value))
}
lb.Set(labels.AlertNameLabel, r.Name())
lb.Set(labels.AlertRuleIdLabel, r.ID())
lb.Set(labels.RuleSourceLabel, r.GeneratorURL())
annotations := make(labels.Labels, 0, len(r.annotations.Map()))
for name, value := range r.annotations.Map() {
annotations = append(annotations, labels.Label{Name: name, Value: expand(value)})
}
if smpl.IsMissing {
lb.Set(labels.AlertNameLabel, "[No data] "+r.Name())
lb.Set(labels.NoDataLabel, "true")
}
// Links with timestamps should go in annotations since labels
// is used alert grouping, and we want to group alerts with the same
// label set, but different timestamps, together.
switch r.typ {
case ruletypes.AlertTypeTraces:
link := r.prepareLinksToTraces(ctx, ts, smpl.Metric)
if link != "" && r.hostFromSource() != "" {
r.logger.InfoContext(ctx, "adding traces link to annotations", "link", fmt.Sprintf("%s/traces-explorer?%s", r.hostFromSource(), link))
annotations = append(annotations, labels.Label{Name: "related_traces", Value: fmt.Sprintf("%s/traces-explorer?%s", r.hostFromSource(), link)})
}
case ruletypes.AlertTypeLogs:
link := r.prepareLinksToLogs(ctx, ts, smpl.Metric)
if link != "" && r.hostFromSource() != "" {
r.logger.InfoContext(ctx, "adding logs link to annotations", "link", fmt.Sprintf("%s/logs/logs-explorer?%s", r.hostFromSource(), link))
annotations = append(annotations, labels.Label{Name: "related_logs", Value: fmt.Sprintf("%s/logs/logs-explorer?%s", r.hostFromSource(), link)})
}
}
lbs := lb.Labels()
h := lbs.Hash()
resultFPs[h] = struct{}{}
if _, ok := alerts[h]; ok {
return 0, fmt.Errorf("duplicate alert found, vector contains metrics with the same labelset after applying alert labels")
}
alerts[h] = &ruletypes.Alert{
Labels: lbs,
QueryResultLables: resultLabels,
Annotations: annotations,
ActiveAt: ts,
State: model.StatePending,
Value: smpl.V,
GeneratorURL: r.GeneratorURL(),
Receivers: ruleReceiverMap[lbs.Map()[ruletypes.LabelThresholdName]],
Missing: smpl.IsMissing,
IsRecovering: smpl.IsRecovering,
}
}
r.logger.InfoContext(ctx, "number of alerts found", "rule_name", r.Name(), "alerts_count", len(alerts))
// alerts[h] is ready, add or update active list now
for h, a := range alerts {
// Check whether we already have alerting state for the identifying label set.
// Update the last value and annotations if so, create a new alert entry otherwise.
if alert, ok := r.Active[h]; ok && alert.State != model.StateInactive {
alert.Value = a.Value
alert.Annotations = a.Annotations
// Update the recovering and missing state of existing alert
alert.IsRecovering = a.IsRecovering
alert.Missing = a.Missing
if v, ok := alert.Labels.Map()[ruletypes.LabelThresholdName]; ok {
alert.Receivers = ruleReceiverMap[v]
}
continue
}
r.Active[h] = a
}
itemsToAdd := []model.RuleStateHistory{}
// Check if any pending alerts should be removed or fire now. Write out alert timeseries.
for fp, a := range r.Active {
labelsJSON, err := json.Marshal(a.QueryResultLables)
if err != nil {
r.logger.ErrorContext(ctx, "error marshaling labels", errors.Attr(err), "labels", a.Labels)
}
if _, ok := resultFPs[fp]; !ok {
// If the alert was previously firing, keep it around for a given
// retention time so it is reported as resolved to the AlertManager.
if a.State == model.StatePending || (!a.ResolvedAt.IsZero() && ts.Sub(a.ResolvedAt) > ruletypes.ResolvedRetention) {
delete(r.Active, fp)
}
if a.State != model.StateInactive {
r.logger.DebugContext(ctx, "converting firing alert to inActive", "name", r.Name())
a.State = model.StateInactive
a.ResolvedAt = ts
itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
RuleID: r.ID(),
RuleName: r.Name(),
State: model.StateInactive,
StateChanged: true,
UnixMilli: ts.UnixMilli(),
Labels: model.LabelsString(labelsJSON),
Fingerprint: a.QueryResultLables.Hash(),
Value: a.Value,
})
}
continue
}
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration.Duration() {
r.logger.DebugContext(ctx, "converting pending alert to firing", "name", r.Name())
a.State = model.StateFiring
a.FiredAt = ts
state := model.StateFiring
if a.Missing {
state = model.StateNoData
}
itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
RuleID: r.ID(),
RuleName: r.Name(),
State: state,
StateChanged: true,
UnixMilli: ts.UnixMilli(),
Labels: model.LabelsString(labelsJSON),
Fingerprint: a.QueryResultLables.Hash(),
Value: a.Value,
})
}
// We need to change firing alert to recovering if the returned sample meets recovery threshold
changeAlertingToRecovering := a.State == model.StateFiring && a.IsRecovering
// We need to change recovering alerts to firing if the returned sample meets target threshold
changeRecoveringToFiring := a.State == model.StateRecovering && !a.IsRecovering && !a.Missing
// in any of the above case we need to update the status of alert
if changeAlertingToRecovering || changeRecoveringToFiring {
state := model.StateRecovering
if changeRecoveringToFiring {
state = model.StateFiring
}
a.State = state
r.logger.DebugContext(ctx, "converting alert state", "name", r.Name(), "state", state)
itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
RuleID: r.ID(),
RuleName: r.Name(),
State: state,
StateChanged: true,
UnixMilli: ts.UnixMilli(),
Labels: model.LabelsString(labelsJSON),
Fingerprint: a.QueryResultLables.Hash(),
Value: a.Value,
})
}
}
currentState := r.State()
overallStateChanged := currentState != prevState
for idx, item := range itemsToAdd {
item.OverallStateChanged = overallStateChanged
item.OverallState = currentState
itemsToAdd[idx] = item
}
r.RecordRuleStateHistory(ctx, prevState, currentState, itemsToAdd)
r.health = ruletypes.HealthGood
r.lastError = err
return len(r.Active), nil
}
func (r *ThresholdRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.name,
RuleCondition: r.ruleCondition,
EvalWindow: r.evalWindow,
Labels: r.labels.Map(),
Annotations: r.annotations.Map(),
PreferredChannels: r.preferredChannels,
}
byt, err := json.Marshal(ar)
if err != nil {
return fmt.Sprintf("error marshaling alerting rule: %s", err.Error())
}
return string(byt)
return r.EvalVector(ctx, ts, res, opts)
}

View File

@@ -206,8 +206,11 @@ func (q *QueryBuilderQuery[T]) validateAggregations(cfg validationConfig) error
return nil
}
// At least one aggregation required for non-disabled queries
if len(q.Aggregations) == 0 && !q.Disabled {
// At least one aggregation required for aggregation queries, even if
// they are disabled, usually because they are used in formula
// regardless of use in formula, it's invalid to have empty Aggregations
// for aggregation request
if len(q.Aggregations) == 0 {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"at least one aggregation is required",

View File

@@ -4,6 +4,7 @@ import (
"strings"
"testing"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
@@ -31,7 +32,14 @@ func TestQueryRangeRequest_ValidateAllQueriesNotDisabled(t *testing.T) {
Spec: QueryBuilderQuery[MetricAggregation]{
Name: "A",
Disabled: true,
Signal: telemetrytypes.SignalMetrics,
Aggregations: []MetricAggregation{
{
MetricName: "test",
TimeAggregation: metrictypes.TimeAggregationAvg,
SpaceAggregation: metrictypes.SpaceAggregationMax,
},
},
Signal: telemetrytypes.SignalMetrics,
},
},
{
@@ -39,7 +47,12 @@ func TestQueryRangeRequest_ValidateAllQueriesNotDisabled(t *testing.T) {
Spec: QueryBuilderQuery[LogAggregation]{
Name: "B",
Disabled: true,
Signal: telemetrytypes.SignalLogs,
Aggregations: []LogAggregation{
{
Expression: "count()",
},
},
Signal: telemetrytypes.SignalLogs,
},
},
},
@@ -61,7 +74,14 @@ func TestQueryRangeRequest_ValidateAllQueriesNotDisabled(t *testing.T) {
Spec: QueryBuilderQuery[MetricAggregation]{
Name: "A",
Disabled: true,
Signal: telemetrytypes.SignalMetrics,
Aggregations: []MetricAggregation{
{
MetricName: "test",
TimeAggregation: metrictypes.TimeAggregationAvg,
SpaceAggregation: metrictypes.SpaceAggregationMax,
},
},
Signal: telemetrytypes.SignalMetrics,
},
},
{
@@ -194,7 +214,14 @@ func TestQueryRangeRequest_ValidateAllQueriesNotDisabled(t *testing.T) {
Spec: QueryBuilderQuery[MetricAggregation]{
Name: "A",
Disabled: true,
Signal: telemetrytypes.SignalMetrics,
Aggregations: []MetricAggregation{
{
MetricName: "test",
TimeAggregation: metrictypes.TimeAggregationAvg,
SpaceAggregation: metrictypes.SpaceAggregationMax,
},
},
Signal: telemetrytypes.SignalMetrics,
},
},
{
@@ -232,7 +259,12 @@ func TestQueryRangeRequest_ValidateAllQueriesNotDisabled(t *testing.T) {
Spec: QueryBuilderQuery[LogAggregation]{
Name: "A",
Disabled: true,
Signal: telemetrytypes.SignalLogs,
Aggregations: []LogAggregation{
{
Expression: "sum(duration)",
},
},
Signal: telemetrytypes.SignalLogs,
},
},
},
@@ -366,7 +398,12 @@ func TestQueryRangeRequest_ValidateCompositeQuery(t *testing.T) {
Spec: QueryBuilderQuery[LogAggregation]{
Name: "A",
Disabled: true,
Signal: telemetrytypes.SignalLogs,
Aggregations: []LogAggregation{
{
Expression: "count()",
},
},
Signal: telemetrytypes.SignalLogs,
},
},
{
@@ -374,7 +411,12 @@ func TestQueryRangeRequest_ValidateCompositeQuery(t *testing.T) {
Spec: QueryBuilderQuery[TraceAggregation]{
Name: "A",
Disabled: true,
Signal: telemetrytypes.SignalTraces,
Aggregations: []TraceAggregation{
{
Expression: "count()",
},
},
Signal: telemetrytypes.SignalTraces,
},
},
},
@@ -396,7 +438,12 @@ func TestQueryRangeRequest_ValidateCompositeQuery(t *testing.T) {
Spec: QueryBuilderQuery[LogAggregation]{
Name: "X",
Disabled: true,
Signal: telemetrytypes.SignalLogs,
Aggregations: []LogAggregation{
{
Expression: "count()",
},
},
Signal: telemetrytypes.SignalLogs,
},
},
{
@@ -404,7 +451,14 @@ func TestQueryRangeRequest_ValidateCompositeQuery(t *testing.T) {
Spec: QueryBuilderQuery[MetricAggregation]{
Name: "X",
Disabled: true,
Signal: telemetrytypes.SignalMetrics,
Aggregations: []MetricAggregation{
{
MetricName: "test",
TimeAggregation: metrictypes.TimeAggregationAvg,
SpaceAggregation: metrictypes.SpaceAggregationMax,
},
},
Signal: telemetrytypes.SignalMetrics,
},
},
},
@@ -427,7 +481,9 @@ func TestQueryRangeRequest_ValidateCompositeQuery(t *testing.T) {
Name: "A",
Signal: telemetrytypes.SignalLogs,
Aggregations: []LogAggregation{
{Expression: "count()"},
{
Expression: "count()",
},
},
},
},
@@ -581,7 +637,9 @@ func TestQueryRangeRequest_ValidateCompositeQuery(t *testing.T) {
Name: "A",
Signal: telemetrytypes.SignalLogs,
Aggregations: []LogAggregation{
{Expression: "count()"},
{
Expression: "count()",
},
},
},
},