mirror of
https://github.com/SigNoz/signoz.git
synced 2026-02-09 03:02:20 +00:00
Compare commits
8 Commits
ns/ext-api
...
query-rang
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1251fb7e1d | ||
|
|
b420ca494e | ||
|
|
25e81bef02 | ||
|
|
e4693ce64c | ||
|
|
ca9b3a910a | ||
|
|
9dc7d2389a | ||
|
|
da5860297f | ||
|
|
54fca5ba44 |
5
.gitignore
vendored
5
.gitignore
vendored
@@ -1,11 +1,8 @@
|
||||
|
||||
node_modules
|
||||
|
||||
# editor
|
||||
.vscode
|
||||
!.vscode/settings.json
|
||||
.zed
|
||||
.idea
|
||||
|
||||
deploy/docker/environment_tiny/common_test
|
||||
frontend/node_modules
|
||||
@@ -34,6 +31,8 @@ frontend/yarn-debug.log*
|
||||
frontend/yarn-error.log*
|
||||
frontend/src/constants/env.ts
|
||||
|
||||
.idea
|
||||
|
||||
**/build
|
||||
**/storage
|
||||
**/locust-scripts/__pycache__/
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -11,6 +11,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||
"github.com/SigNoz/signoz/pkg/apis/fields"
|
||||
"github.com/SigNoz/signoz/pkg/global"
|
||||
"github.com/SigNoz/signoz/pkg/http/handler"
|
||||
"github.com/SigNoz/signoz/pkg/http/middleware"
|
||||
querierAPI "github.com/SigNoz/signoz/pkg/querier"
|
||||
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
|
||||
@@ -22,6 +23,8 @@ import (
|
||||
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/version"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
@@ -108,7 +111,22 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)
|
||||
|
||||
// v5
|
||||
router.HandleFunc("/api/v5/query_range", am.ViewAccess(ah.queryRangeV5)).Methods(http.MethodPost)
|
||||
router.Handle("/api/v5/query_range", handler.New(am.ViewAccess(ah.queryRangeV5), handler.OpenAPIDef{
|
||||
ID: "QueryRangeV5",
|
||||
Tags: []string{"query"},
|
||||
Summary: "Query range",
|
||||
Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, joins, trace operators, PromQL, and ClickHouse SQL.",
|
||||
Request: new(qbtypes.QueryRangeRequest),
|
||||
RequestContentType: "application/json",
|
||||
Response: new(qbtypes.QueryRangeResponse),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{http.StatusBadRequest},
|
||||
SecuritySchemes: []handler.OpenAPISecurityScheme{
|
||||
{Name: ctxtypes.AuthTypeAPIKey.StringValue(), Scopes: []string{"VIEWER"}},
|
||||
{Name: ctxtypes.AuthTypeTokenizer.StringValue(), Scopes: []string{"VIEWER"}},
|
||||
},
|
||||
})).Methods(http.MethodPost)
|
||||
|
||||
router.HandleFunc("/api/v5/substitute_vars", am.ViewAccess(ah.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)
|
||||
|
||||
|
||||
@@ -762,18 +762,6 @@ export interface MetricsexplorertypesMetricHighlightsResponseDTO {
|
||||
totalTimeSeries: number;
|
||||
}
|
||||
|
||||
export enum MetricsexplorertypesMetricMetadataDTOTemporality {
|
||||
delta = 'delta',
|
||||
cumulative = 'cumulative',
|
||||
unspecified = 'unspecified',
|
||||
}
|
||||
export enum MetricsexplorertypesMetricMetadataDTOType {
|
||||
gauge = 'gauge',
|
||||
sum = 'sum',
|
||||
histogram = 'histogram',
|
||||
summary = 'summary',
|
||||
exponentialhistogram = 'exponentialhistogram',
|
||||
}
|
||||
export interface MetricsexplorertypesMetricMetadataDTO {
|
||||
/**
|
||||
* @type string
|
||||
@@ -783,29 +771,14 @@ export interface MetricsexplorertypesMetricMetadataDTO {
|
||||
* @type boolean
|
||||
*/
|
||||
isMonotonic: boolean;
|
||||
/**
|
||||
* @enum delta,cumulative,unspecified
|
||||
* @type string
|
||||
*/
|
||||
temporality: MetricsexplorertypesMetricMetadataDTOTemporality;
|
||||
/**
|
||||
* @enum gauge,sum,histogram,summary,exponentialhistogram
|
||||
* @type string
|
||||
*/
|
||||
type: MetricsexplorertypesMetricMetadataDTOType;
|
||||
temporality: MetrictypesTemporalityDTO;
|
||||
type: MetrictypesTypeDTO;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
unit: string;
|
||||
}
|
||||
|
||||
export enum MetricsexplorertypesStatDTOType {
|
||||
gauge = 'gauge',
|
||||
sum = 'sum',
|
||||
histogram = 'histogram',
|
||||
summary = 'summary',
|
||||
exponentialhistogram = 'exponentialhistogram',
|
||||
}
|
||||
export interface MetricsexplorertypesStatDTO {
|
||||
/**
|
||||
* @type string
|
||||
@@ -825,11 +798,7 @@ export interface MetricsexplorertypesStatDTO {
|
||||
* @minimum 0
|
||||
*/
|
||||
timeseries: number;
|
||||
/**
|
||||
* @enum gauge,sum,histogram,summary,exponentialhistogram
|
||||
* @type string
|
||||
*/
|
||||
type: MetricsexplorertypesStatDTOType;
|
||||
type: MetrictypesTypeDTO;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
@@ -889,7 +858,7 @@ export interface MetricsexplorertypesTreemapEntryDTO {
|
||||
totalValue: number;
|
||||
}
|
||||
|
||||
export enum MetricsexplorertypesTreemapRequestDTOMode {
|
||||
export enum MetricsexplorertypesTreemapModeDTO {
|
||||
timeseries = 'timeseries',
|
||||
samples = 'samples',
|
||||
}
|
||||
@@ -904,11 +873,7 @@ export interface MetricsexplorertypesTreemapRequestDTO {
|
||||
* @type integer
|
||||
*/
|
||||
limit: number;
|
||||
/**
|
||||
* @enum timeseries,samples
|
||||
* @type string
|
||||
*/
|
||||
mode: MetricsexplorertypesTreemapRequestDTOMode;
|
||||
mode: MetricsexplorertypesTreemapModeDTO;
|
||||
/**
|
||||
* @type integer
|
||||
* @format int64
|
||||
@@ -929,18 +894,6 @@ export interface MetricsexplorertypesTreemapResponseDTO {
|
||||
timeseries: MetricsexplorertypesTreemapEntryDTO[] | null;
|
||||
}
|
||||
|
||||
export enum MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality {
|
||||
delta = 'delta',
|
||||
cumulative = 'cumulative',
|
||||
unspecified = 'unspecified',
|
||||
}
|
||||
export enum MetricsexplorertypesUpdateMetricMetadataRequestDTOType {
|
||||
gauge = 'gauge',
|
||||
sum = 'sum',
|
||||
histogram = 'histogram',
|
||||
summary = 'summary',
|
||||
exponentialhistogram = 'exponentialhistogram',
|
||||
}
|
||||
export interface MetricsexplorertypesUpdateMetricMetadataRequestDTO {
|
||||
/**
|
||||
* @type string
|
||||
@@ -954,22 +907,26 @@ export interface MetricsexplorertypesUpdateMetricMetadataRequestDTO {
|
||||
* @type string
|
||||
*/
|
||||
metricName: string;
|
||||
/**
|
||||
* @enum delta,cumulative,unspecified
|
||||
* @type string
|
||||
*/
|
||||
temporality: MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality;
|
||||
/**
|
||||
* @enum gauge,sum,histogram,summary,exponentialhistogram
|
||||
* @type string
|
||||
*/
|
||||
type: MetricsexplorertypesUpdateMetricMetadataRequestDTOType;
|
||||
temporality: MetrictypesTemporalityDTO;
|
||||
type: MetrictypesTypeDTO;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
unit: string;
|
||||
}
|
||||
|
||||
export enum MetrictypesTemporalityDTO {
|
||||
delta = 'delta',
|
||||
cumulative = 'cumulative',
|
||||
unspecified = 'unspecified',
|
||||
}
|
||||
export enum MetrictypesTypeDTO {
|
||||
gauge = 'gauge',
|
||||
sum = 'sum',
|
||||
histogram = 'histogram',
|
||||
summary = 'summary',
|
||||
exponentialhistogram = 'exponentialhistogram',
|
||||
}
|
||||
export interface PreferencetypesPreferenceDTO {
|
||||
/**
|
||||
* @type array
|
||||
@@ -1388,7 +1345,7 @@ export interface TypesPostableForgotPasswordDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
email?: string;
|
||||
email: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
@@ -1396,7 +1353,7 @@ export interface TypesPostableForgotPasswordDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
orgId?: string;
|
||||
orgId: string;
|
||||
}
|
||||
|
||||
export interface TypesPostableInviteDTO {
|
||||
|
||||
@@ -14,6 +14,6 @@ export const VIEW_TYPES = {
|
||||
export const SPAN_ATTRIBUTES = {
|
||||
URL_PATH: 'http.url',
|
||||
RESPONSE_STATUS_CODE: 'response_status_code',
|
||||
SERVER_NAME: 'http_host',
|
||||
SERVER_NAME: 'net.peer.name',
|
||||
SERVER_PORT: 'net.peer.port',
|
||||
} as const;
|
||||
|
||||
@@ -638,7 +638,7 @@ export const getEndPointsQueryPayload = (
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
key: SPAN_ATTRIBUTES.SERVER_NAME,
|
||||
type: '',
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: domainName,
|
||||
@@ -685,7 +685,7 @@ export const getEndPointsQueryPayload = (
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
key: SPAN_ATTRIBUTES.SERVER_NAME,
|
||||
type: '',
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: domainName,
|
||||
@@ -733,7 +733,7 @@ export const getEndPointsQueryPayload = (
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
key: SPAN_ATTRIBUTES.SERVER_NAME,
|
||||
type: '',
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: domainName,
|
||||
@@ -780,7 +780,7 @@ export const getEndPointsQueryPayload = (
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
key: SPAN_ATTRIBUTES.SERVER_NAME,
|
||||
type: '',
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: domainName,
|
||||
@@ -1302,7 +1302,7 @@ export const getTopErrorsCoRelationQueryFilters = (
|
||||
{
|
||||
id: 'e8a043b7',
|
||||
key: {
|
||||
key: SPAN_ATTRIBUTES.SERVER_NAME,
|
||||
key: 'net.peer.name',
|
||||
dataType: DataTypes.String,
|
||||
type: '',
|
||||
},
|
||||
@@ -2198,7 +2198,7 @@ export const getEndPointZeroStateQueryPayload = (
|
||||
key: {
|
||||
key: SPAN_ATTRIBUTES.SERVER_NAME,
|
||||
dataType: DataTypes.String,
|
||||
type: '',
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: domainName,
|
||||
@@ -2793,7 +2793,7 @@ export const getStatusCodeBarChartWidgetData = (
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
key: SPAN_ATTRIBUTES.SERVER_NAME,
|
||||
type: '',
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: domainName,
|
||||
|
||||
@@ -0,0 +1,53 @@
|
||||
import { memo, useMemo } from 'react';
|
||||
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
|
||||
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
|
||||
|
||||
import SelectVariableInput from './SelectVariableInput';
|
||||
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
|
||||
import { VariableItemProps } from './VariableItem';
|
||||
|
||||
type CustomVariableInputProps = Pick<
|
||||
VariableItemProps,
|
||||
'variableData' | 'onValueUpdate'
|
||||
>;
|
||||
|
||||
function CustomVariableInput({
|
||||
variableData,
|
||||
onValueUpdate,
|
||||
}: CustomVariableInputProps): JSX.Element {
|
||||
const optionsData: (string | number | boolean)[] = useMemo(() => {
|
||||
return sortValues(
|
||||
commaValuesParser(variableData.customValue || ''),
|
||||
variableData.sort,
|
||||
) as (string | number | boolean)[];
|
||||
}, [variableData.customValue, variableData.sort]);
|
||||
|
||||
const {
|
||||
value,
|
||||
defaultValue,
|
||||
enableSelectAll,
|
||||
onChange,
|
||||
onDropdownVisibleChange,
|
||||
handleClear,
|
||||
} = useDashboardVariableSelectHelper({
|
||||
variableData,
|
||||
optionsData,
|
||||
onValueUpdate,
|
||||
});
|
||||
|
||||
return (
|
||||
<SelectVariableInput
|
||||
variableId={variableData.id}
|
||||
options={optionsData}
|
||||
value={value}
|
||||
onChange={onChange}
|
||||
onDropdownVisibleChange={onDropdownVisibleChange}
|
||||
onClear={handleClear}
|
||||
enableSelectAll={enableSelectAll}
|
||||
defaultValue={defaultValue}
|
||||
isMultiSelect={variableData.multiSelect}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
export default memo(CustomVariableInput);
|
||||
@@ -25,12 +25,6 @@
|
||||
}
|
||||
}
|
||||
|
||||
&.focused {
|
||||
.variable-value {
|
||||
outline: 1px solid var(--bg-robin-400);
|
||||
}
|
||||
}
|
||||
|
||||
.variable-value {
|
||||
display: flex;
|
||||
min-width: 120px;
|
||||
@@ -48,6 +42,11 @@
|
||||
font-style: normal;
|
||||
font-weight: 400;
|
||||
line-height: 16px; /* 133.333% */
|
||||
|
||||
&:hover,
|
||||
&:focus-within {
|
||||
outline: 1px solid var(--bg-robin-400);
|
||||
}
|
||||
}
|
||||
|
||||
.variable-select {
|
||||
@@ -99,12 +98,6 @@
|
||||
|
||||
.lightMode {
|
||||
.variable-item {
|
||||
&.focused {
|
||||
.variable-value {
|
||||
border: 1px solid var(--bg-robin-400);
|
||||
}
|
||||
}
|
||||
|
||||
.variable-name {
|
||||
border: 1px solid var(--bg-vanilla-300);
|
||||
background: var(--bg-vanilla-100);
|
||||
@@ -115,6 +108,11 @@
|
||||
border: 1px solid var(--bg-vanilla-300);
|
||||
background: var(--bg-vanilla-100);
|
||||
color: var(--bg-ink-400);
|
||||
|
||||
&:hover,
|
||||
&:focus-within {
|
||||
outline: 1px solid var(--bg-robin-400);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -124,3 +122,9 @@
|
||||
padding: 4px 12px;
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
.dashboard-variables-selection-container {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 12px;
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { memo, useEffect } from 'react';
|
||||
import { memo, useCallback, useEffect, useMemo } from 'react';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { Row } from 'antd';
|
||||
import { ALL_SELECTED_VALUE } from 'components/NewSelect/utils';
|
||||
@@ -7,7 +7,6 @@ import {
|
||||
useDashboardVariablesSelector,
|
||||
} from 'hooks/dashboard/useDashboardVariables';
|
||||
import useVariablesFromUrl from 'hooks/dashboard/useVariablesFromUrl';
|
||||
import { isEmpty } from 'lodash-es';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { initializeDefaultVariables } from 'providers/Dashboard/initializeDefaultVariables';
|
||||
import { AppState } from 'store/reducers';
|
||||
@@ -22,7 +21,6 @@ import './DashboardVariableSelection.styles.scss';
|
||||
|
||||
function DashboardVariableSelection(): JSX.Element | null {
|
||||
const {
|
||||
selectedDashboard,
|
||||
setSelectedDashboard,
|
||||
updateLocalStorageDashboardVariables,
|
||||
variablesToGetUpdated,
|
||||
@@ -52,34 +50,36 @@ function DashboardVariableSelection(): JSX.Element | null {
|
||||
);
|
||||
}, [getUrlVariables, updateUrlVariable, dashboardVariables]);
|
||||
|
||||
// this handles the case where the dependency order changes i.e. variable list updated via creation or deletion etc. and we need to refetch the variables
|
||||
// also trigger when the global time changes
|
||||
useEffect(
|
||||
() => {
|
||||
if (!isEmpty(dependencyData?.order)) {
|
||||
setVariablesToGetUpdated(dependencyData?.order || []);
|
||||
}
|
||||
},
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[JSON.stringify(dependencyData?.order), minTime, maxTime],
|
||||
// Memoize the order key to avoid unnecessary triggers
|
||||
const dependencyOrderKey = useMemo(
|
||||
() => dependencyData?.order?.join(',') ?? '',
|
||||
[dependencyData?.order],
|
||||
);
|
||||
|
||||
// Trigger refetch when dependency order changes or global time changes
|
||||
useEffect(() => {
|
||||
if (dependencyData?.order && dependencyData.order.length > 0) {
|
||||
setVariablesToGetUpdated(dependencyData?.order || []);
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [dependencyOrderKey, minTime, maxTime]);
|
||||
|
||||
// Performance optimization: For dynamic variables with allSelected=true, we don't store
|
||||
// individual values in localStorage since we can always derive them from available options.
|
||||
// This makes localStorage much lighter and more efficient.
|
||||
const onValueUpdate = (
|
||||
name: string,
|
||||
id: string,
|
||||
value: IDashboardVariable['selectedValue'],
|
||||
allSelected: boolean,
|
||||
haveCustomValuesSelected?: boolean,
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
): void => {
|
||||
if (id) {
|
||||
const onValueUpdate = useCallback(
|
||||
(
|
||||
name: string,
|
||||
id: string,
|
||||
value: IDashboardVariable['selectedValue'],
|
||||
allSelected: boolean,
|
||||
haveCustomValuesSelected?: boolean,
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
): void => {
|
||||
// For dynamic variables, only store in localStorage when NOT allSelected
|
||||
// This makes localStorage much lighter by avoiding storing all individual values
|
||||
const variable = dashboardVariables?.[id] || dashboardVariables?.[name];
|
||||
const isDynamic = variable?.type === 'DYNAMIC';
|
||||
const variable = dashboardVariables[id] || dashboardVariables[name];
|
||||
const isDynamic = variable.type === 'DYNAMIC';
|
||||
updateLocalStorageDashboardVariables(name, value, allSelected, isDynamic);
|
||||
|
||||
if (allSelected) {
|
||||
@@ -88,41 +88,39 @@ function DashboardVariableSelection(): JSX.Element | null {
|
||||
updateUrlVariable(name || id, value);
|
||||
}
|
||||
|
||||
if (selectedDashboard) {
|
||||
setSelectedDashboard((prev) => {
|
||||
if (prev) {
|
||||
const oldVariables = prev?.data.variables;
|
||||
// this is added to handle case where we have two different
|
||||
// schemas for variable response
|
||||
if (oldVariables?.[id]) {
|
||||
oldVariables[id] = {
|
||||
...oldVariables[id],
|
||||
selectedValue: value,
|
||||
allSelected,
|
||||
haveCustomValuesSelected,
|
||||
};
|
||||
}
|
||||
if (oldVariables?.[name]) {
|
||||
oldVariables[name] = {
|
||||
...oldVariables[name],
|
||||
selectedValue: value,
|
||||
allSelected,
|
||||
haveCustomValuesSelected,
|
||||
};
|
||||
}
|
||||
return {
|
||||
...prev,
|
||||
data: {
|
||||
...prev?.data,
|
||||
variables: {
|
||||
...oldVariables,
|
||||
},
|
||||
},
|
||||
setSelectedDashboard((prev) => {
|
||||
if (prev) {
|
||||
const oldVariables = { ...prev?.data.variables };
|
||||
// this is added to handle case where we have two different
|
||||
// schemas for variable response
|
||||
if (oldVariables?.[id]) {
|
||||
oldVariables[id] = {
|
||||
...oldVariables[id],
|
||||
selectedValue: value,
|
||||
allSelected,
|
||||
haveCustomValuesSelected,
|
||||
};
|
||||
}
|
||||
return prev;
|
||||
});
|
||||
}
|
||||
if (oldVariables?.[name]) {
|
||||
oldVariables[name] = {
|
||||
...oldVariables[name],
|
||||
selectedValue: value,
|
||||
allSelected,
|
||||
haveCustomValuesSelected,
|
||||
};
|
||||
}
|
||||
return {
|
||||
...prev,
|
||||
data: {
|
||||
...prev?.data,
|
||||
variables: {
|
||||
...oldVariables,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
return prev;
|
||||
});
|
||||
|
||||
if (dependencyData) {
|
||||
const updatedVariables: string[] = [];
|
||||
@@ -138,11 +136,20 @@ function DashboardVariableSelection(): JSX.Element | null {
|
||||
} else {
|
||||
setVariablesToGetUpdated((prev) => prev.filter((v) => v !== name));
|
||||
}
|
||||
}
|
||||
};
|
||||
},
|
||||
[
|
||||
// This can be removed
|
||||
dashboardVariables,
|
||||
updateLocalStorageDashboardVariables,
|
||||
dependencyData,
|
||||
updateUrlVariable,
|
||||
setSelectedDashboard,
|
||||
setVariablesToGetUpdated,
|
||||
],
|
||||
);
|
||||
|
||||
return (
|
||||
<Row style={{ display: 'flex', gap: '12px' }}>
|
||||
<Row className="dashboard-variables-selection-container">
|
||||
{sortedVariablesArray.map((variable) => {
|
||||
const key = `${variable.name}${variable.id}${variable.order}`;
|
||||
|
||||
|
||||
@@ -23,9 +23,9 @@ import { SelectItemStyle } from './styles';
|
||||
import {
|
||||
areArraysEqual,
|
||||
getOptionsForDynamicVariable,
|
||||
getSelectValue,
|
||||
uniqueValues,
|
||||
} from './util';
|
||||
import { getSelectValue } from './VariableItem';
|
||||
|
||||
import './DashboardVariableSelection.styles.scss';
|
||||
|
||||
|
||||
@@ -0,0 +1,229 @@
|
||||
import { memo, useCallback, useState } from 'react';
|
||||
import { useQuery } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
|
||||
import { isArray, isString } from 'lodash-es';
|
||||
import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
import { VariableResponseProps } from 'types/api/dashboard/variables/query';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
import { variablePropsToPayloadVariables } from '../utils';
|
||||
import SelectVariableInput from './SelectVariableInput';
|
||||
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
|
||||
import { areArraysEqual, checkAPIInvocation } from './util';
|
||||
|
||||
interface QueryVariableInputProps {
|
||||
variableData: IDashboardVariable;
|
||||
existingVariables: Record<string, IDashboardVariable>;
|
||||
onValueUpdate: (
|
||||
name: string,
|
||||
id: string,
|
||||
value: IDashboardVariable['selectedValue'],
|
||||
allSelected: boolean,
|
||||
) => void;
|
||||
variablesToGetUpdated: string[];
|
||||
setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
|
||||
dependencyData: IDependencyData | null;
|
||||
}
|
||||
|
||||
function QueryVariableInput({
|
||||
variableData,
|
||||
existingVariables,
|
||||
variablesToGetUpdated,
|
||||
setVariablesToGetUpdated,
|
||||
dependencyData,
|
||||
onValueUpdate,
|
||||
}: QueryVariableInputProps): JSX.Element {
|
||||
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
|
||||
[],
|
||||
);
|
||||
const [errorMessage, setErrorMessage] = useState<null | string>(null);
|
||||
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
|
||||
const {
|
||||
tempSelection,
|
||||
setTempSelection,
|
||||
value,
|
||||
defaultValue,
|
||||
enableSelectAll,
|
||||
onChange,
|
||||
onDropdownVisibleChange,
|
||||
handleClear,
|
||||
} = useDashboardVariableSelectHelper({
|
||||
variableData,
|
||||
optionsData,
|
||||
onValueUpdate,
|
||||
});
|
||||
|
||||
const validVariableUpdate = (): boolean => {
|
||||
if (!variableData.name) {
|
||||
return false;
|
||||
}
|
||||
return Boolean(
|
||||
variablesToGetUpdated.length &&
|
||||
variablesToGetUpdated[0] === variableData.name,
|
||||
);
|
||||
};
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
const getOptions = (variablesRes: VariableResponseProps | null): void => {
|
||||
try {
|
||||
setErrorMessage(null);
|
||||
|
||||
if (
|
||||
variablesRes?.variableValues &&
|
||||
Array.isArray(variablesRes?.variableValues)
|
||||
) {
|
||||
const newOptionsData = sortValues(
|
||||
variablesRes?.variableValues,
|
||||
variableData.sort,
|
||||
);
|
||||
|
||||
const oldOptionsData = sortValues(optionsData, variableData.sort) as never;
|
||||
|
||||
if (!areArraysEqual(newOptionsData, oldOptionsData)) {
|
||||
let valueNotInList = false;
|
||||
|
||||
if (isArray(variableData.selectedValue)) {
|
||||
variableData.selectedValue.forEach((val) => {
|
||||
if (!newOptionsData.includes(val)) {
|
||||
valueNotInList = true;
|
||||
}
|
||||
});
|
||||
} else if (
|
||||
isString(variableData.selectedValue) &&
|
||||
!newOptionsData.includes(variableData.selectedValue)
|
||||
) {
|
||||
valueNotInList = true;
|
||||
}
|
||||
|
||||
// variablesData.allSelected is added for the case where on change of options we need to update the
|
||||
// local storage
|
||||
if (
|
||||
variableData.name &&
|
||||
(validVariableUpdate() || valueNotInList || variableData.allSelected)
|
||||
) {
|
||||
if (
|
||||
variableData.allSelected &&
|
||||
variableData.multiSelect &&
|
||||
variableData.showALLOption
|
||||
) {
|
||||
onValueUpdate(variableData.name, variableData.id, newOptionsData, true);
|
||||
|
||||
// Update tempSelection to maintain ALL state when dropdown is open
|
||||
if (tempSelection !== undefined) {
|
||||
setTempSelection(newOptionsData.map((option) => option.toString()));
|
||||
}
|
||||
} else {
|
||||
const value = variableData.selectedValue;
|
||||
let allSelected = false;
|
||||
|
||||
if (variableData.multiSelect) {
|
||||
const { selectedValue } = variableData;
|
||||
allSelected =
|
||||
newOptionsData.length > 0 &&
|
||||
Array.isArray(selectedValue) &&
|
||||
newOptionsData.every((option) => selectedValue.includes(option));
|
||||
}
|
||||
|
||||
if (variableData.name && variableData.id) {
|
||||
onValueUpdate(variableData.name, variableData.id, value, allSelected);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
setOptionsData(newOptionsData);
|
||||
} else {
|
||||
setVariablesToGetUpdated((prev) =>
|
||||
prev.filter((name) => name !== variableData.name),
|
||||
);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
}
|
||||
};
|
||||
|
||||
const { isLoading, refetch } = useQuery(
|
||||
[
|
||||
REACT_QUERY_KEY.DASHBOARD_BY_ID,
|
||||
variableData.name || '',
|
||||
`${minTime}`,
|
||||
`${maxTime}`,
|
||||
JSON.stringify(dependencyData?.order),
|
||||
],
|
||||
{
|
||||
enabled:
|
||||
variableData &&
|
||||
variableData.type === 'QUERY' &&
|
||||
checkAPIInvocation(
|
||||
variablesToGetUpdated,
|
||||
variableData,
|
||||
dependencyData?.parentDependencyGraph,
|
||||
),
|
||||
queryFn: () =>
|
||||
dashboardVariablesQuery({
|
||||
query: variableData.queryValue || '',
|
||||
variables: variablePropsToPayloadVariables(existingVariables),
|
||||
}),
|
||||
refetchOnWindowFocus: false,
|
||||
onSuccess: (response) => {
|
||||
getOptions(response.payload);
|
||||
setVariablesToGetUpdated((prev) =>
|
||||
prev.filter((v) => v !== variableData.name),
|
||||
);
|
||||
},
|
||||
onError: (error: {
|
||||
details: {
|
||||
error: string;
|
||||
};
|
||||
}) => {
|
||||
const { details } = error;
|
||||
|
||||
if (details.error) {
|
||||
let message = details.error;
|
||||
if ((details.error ?? '').toString().includes('Syntax error:')) {
|
||||
message =
|
||||
'Please make sure query is valid and dependent variables are selected';
|
||||
}
|
||||
setErrorMessage(message);
|
||||
}
|
||||
setVariablesToGetUpdated((prev) =>
|
||||
prev.filter((v) => v !== variableData.name),
|
||||
);
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
const handleRetry = useCallback((): void => {
|
||||
setErrorMessage(null);
|
||||
refetch();
|
||||
}, [refetch]);
|
||||
|
||||
return (
|
||||
<SelectVariableInput
|
||||
variableId={variableData.id}
|
||||
options={optionsData}
|
||||
value={value}
|
||||
onChange={onChange}
|
||||
onDropdownVisibleChange={onDropdownVisibleChange}
|
||||
onClear={handleClear}
|
||||
enableSelectAll={enableSelectAll}
|
||||
defaultValue={defaultValue}
|
||||
isMultiSelect={variableData.multiSelect}
|
||||
// query variable specific, API related props
|
||||
loading={isLoading}
|
||||
errorMessage={errorMessage}
|
||||
onRetry={handleRetry}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
export default memo(QueryVariableInput);
|
||||
@@ -0,0 +1,134 @@
|
||||
import { memo, useMemo } from 'react';
|
||||
import { orange } from '@ant-design/colors';
|
||||
import { WarningOutlined } from '@ant-design/icons';
|
||||
import { Popover, Tooltip, Typography } from 'antd';
|
||||
import { CustomMultiSelect, CustomSelect } from 'components/NewSelect';
|
||||
import { popupContainer } from 'utils/selectPopupContainer';
|
||||
|
||||
import { ALL_SELECT_VALUE } from '../utils';
|
||||
import { SelectItemStyle } from './styles';
|
||||
|
||||
const errorIconStyle = { margin: '0 0.5rem' };
|
||||
|
||||
interface SelectVariableInputProps {
|
||||
variableId: string;
|
||||
options: (string | number | boolean)[];
|
||||
value: string | string[] | undefined;
|
||||
enableSelectAll: boolean;
|
||||
isMultiSelect: boolean;
|
||||
onChange: (value: string | string[]) => void;
|
||||
onClear: () => void;
|
||||
defaultValue?: string | string[];
|
||||
onDropdownVisibleChange?: (visible: boolean) => void;
|
||||
loading?: boolean;
|
||||
errorMessage?: string | null;
|
||||
onRetry?: () => void;
|
||||
}
|
||||
|
||||
const MAX_TAG_DISPLAY_VALUES = 10;
|
||||
|
||||
function maxTagPlaceholder(
|
||||
omittedValues: { label?: React.ReactNode; value?: string | number }[],
|
||||
): JSX.Element {
|
||||
const valuesToShow = omittedValues.slice(0, MAX_TAG_DISPLAY_VALUES);
|
||||
const hasMore = omittedValues.length > MAX_TAG_DISPLAY_VALUES;
|
||||
const tooltipText =
|
||||
valuesToShow.map(({ value: v }) => v ?? '').join(', ') +
|
||||
(hasMore ? ` + ${omittedValues.length - MAX_TAG_DISPLAY_VALUES} more` : '');
|
||||
|
||||
return (
|
||||
<Tooltip title={tooltipText}>
|
||||
<span>+ {omittedValues.length} </span>
|
||||
</Tooltip>
|
||||
);
|
||||
}
|
||||
|
||||
function SelectVariableInput({
|
||||
variableId,
|
||||
options,
|
||||
value,
|
||||
onChange,
|
||||
onDropdownVisibleChange,
|
||||
onClear,
|
||||
loading,
|
||||
errorMessage,
|
||||
onRetry,
|
||||
enableSelectAll,
|
||||
isMultiSelect,
|
||||
defaultValue,
|
||||
}: SelectVariableInputProps): JSX.Element {
|
||||
const selectOptions = useMemo(
|
||||
() =>
|
||||
options.map((option) => ({
|
||||
label: option.toString(),
|
||||
value: option.toString(),
|
||||
})),
|
||||
[options],
|
||||
);
|
||||
|
||||
const commonProps = useMemo(
|
||||
() => ({
|
||||
// main props
|
||||
key: variableId,
|
||||
value,
|
||||
defaultValue,
|
||||
|
||||
// setup props
|
||||
placeholder: 'Select value',
|
||||
className: 'variable-select',
|
||||
popupClassName: 'dropdown-styles',
|
||||
getPopupContainer: popupContainer,
|
||||
style: SelectItemStyle,
|
||||
showSearch: true,
|
||||
bordered: false,
|
||||
|
||||
// dynamic props
|
||||
'data-testid': 'variable-select',
|
||||
onChange,
|
||||
loading,
|
||||
options: selectOptions,
|
||||
errorMessage,
|
||||
onRetry,
|
||||
}),
|
||||
[
|
||||
variableId,
|
||||
defaultValue,
|
||||
onChange,
|
||||
loading,
|
||||
selectOptions,
|
||||
value,
|
||||
errorMessage,
|
||||
onRetry,
|
||||
],
|
||||
);
|
||||
|
||||
return (
|
||||
<>
|
||||
{isMultiSelect ? (
|
||||
<CustomMultiSelect
|
||||
{...commonProps}
|
||||
placement="bottomLeft"
|
||||
maxTagCount={2}
|
||||
onDropdownVisibleChange={onDropdownVisibleChange}
|
||||
maxTagPlaceholder={maxTagPlaceholder}
|
||||
onClear={onClear}
|
||||
enableAllSelection={enableSelectAll}
|
||||
maxTagTextLength={30}
|
||||
allowClear={value !== ALL_SELECT_VALUE && value !== 'ALL'}
|
||||
/>
|
||||
) : (
|
||||
<CustomSelect {...commonProps} />
|
||||
)}
|
||||
|
||||
{errorMessage && (
|
||||
<span style={errorIconStyle}>
|
||||
<Popover placement="top" content={<Typography>{errorMessage}</Typography>}>
|
||||
<WarningOutlined style={{ color: orange[5] }} />
|
||||
</Popover>
|
||||
</span>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
export default memo(SelectVariableInput);
|
||||
@@ -0,0 +1,85 @@
|
||||
import { memo, useCallback, useRef, useState } from 'react';
|
||||
import { Input, InputRef } from 'antd';
|
||||
|
||||
import { VariableItemProps } from './VariableItem';
|
||||
|
||||
type TextboxVariableInputProps = Pick<
|
||||
VariableItemProps,
|
||||
'variableData' | 'onValueUpdate'
|
||||
>;
|
||||
|
||||
function TextboxVariableInput({
|
||||
variableData,
|
||||
onValueUpdate,
|
||||
}: TextboxVariableInputProps): JSX.Element {
|
||||
const handleChange = useCallback(
|
||||
(inputValue: string | string[]): void => {
|
||||
if (inputValue === variableData.selectedValue) {
|
||||
return;
|
||||
}
|
||||
if (variableData.name) {
|
||||
onValueUpdate(variableData.name, variableData.id, inputValue, false);
|
||||
}
|
||||
},
|
||||
[
|
||||
onValueUpdate,
|
||||
variableData.id,
|
||||
variableData.name,
|
||||
variableData.selectedValue,
|
||||
],
|
||||
);
|
||||
|
||||
const textboxInputRef = useRef<InputRef>(null);
|
||||
const [textboxInputValue, setTextboxInputValue] = useState<string>(
|
||||
(variableData.selectedValue?.toString() ||
|
||||
variableData.defaultValue?.toString()) ??
|
||||
'',
|
||||
);
|
||||
|
||||
const handleInputOnChange = useCallback(
|
||||
(event: React.ChangeEvent<HTMLInputElement>) => {
|
||||
setTextboxInputValue(event.target.value);
|
||||
},
|
||||
[setTextboxInputValue],
|
||||
);
|
||||
|
||||
const handleInputOnBlur = useCallback(
|
||||
(event: React.FocusEvent<HTMLInputElement>): void => {
|
||||
const value = event.target.value.trim();
|
||||
// If empty, reset to default value
|
||||
if (!value && variableData.defaultValue) {
|
||||
setTextboxInputValue(variableData.defaultValue.toString());
|
||||
handleChange(variableData.defaultValue.toString());
|
||||
} else {
|
||||
handleChange(value);
|
||||
}
|
||||
},
|
||||
[handleChange, variableData.defaultValue],
|
||||
);
|
||||
|
||||
const handleInputOnKeyDown = useCallback(
|
||||
(event: React.KeyboardEvent<HTMLInputElement>): void => {
|
||||
if (event.key === 'Enter') {
|
||||
textboxInputRef.current?.blur();
|
||||
}
|
||||
},
|
||||
[],
|
||||
);
|
||||
|
||||
return (
|
||||
<Input
|
||||
key={variableData.id}
|
||||
ref={textboxInputRef}
|
||||
placeholder="Enter value"
|
||||
data-testid={`variable-textbox-${variableData.id}`}
|
||||
bordered={false}
|
||||
value={textboxInputValue}
|
||||
title={textboxInputValue}
|
||||
onChange={handleInputOnChange}
|
||||
onBlur={handleInputOnBlur}
|
||||
onKeyDown={handleInputOnKeyDown}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
export default memo(TextboxVariableInput);
|
||||
@@ -1,35 +1,16 @@
|
||||
/* eslint-disable sonarjs/cognitive-complexity */
|
||||
/* eslint-disable jsx-a11y/click-events-have-key-events */
|
||||
/* eslint-disable jsx-a11y/no-static-element-interactions */
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
/* eslint-disable react/jsx-props-no-spreading */
|
||||
/* eslint-disable no-nested-ternary */
|
||||
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react';
|
||||
import { useQuery } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { orange } from '@ant-design/colors';
|
||||
import { InfoCircleOutlined, WarningOutlined } from '@ant-design/icons';
|
||||
import { Input, InputRef, Popover, Tooltip, Typography } from 'antd';
|
||||
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
|
||||
import { CustomMultiSelect, CustomSelect } from 'components/NewSelect';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
|
||||
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
|
||||
import { debounce, isArray, isEmpty, isString } from 'lodash-es';
|
||||
import { memo } from 'react';
|
||||
import { InfoCircleOutlined } from '@ant-design/icons';
|
||||
import { Tooltip, Typography } from 'antd';
|
||||
import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
import { VariableResponseProps } from 'types/api/dashboard/variables/query';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
import { popupContainer } from 'utils/selectPopupContainer';
|
||||
|
||||
import { ALL_SELECT_VALUE, variablePropsToPayloadVariables } from '../utils';
|
||||
import { SelectItemStyle } from './styles';
|
||||
import { areArraysEqual, checkAPIInvocation } from './util';
|
||||
import CustomVariableInput from './CustomVariableInput';
|
||||
import QueryVariableInput from './QueryVariableInput';
|
||||
import TextboxVariableInput from './TextboxVariableInput';
|
||||
|
||||
import './DashboardVariableSelection.styles.scss';
|
||||
|
||||
interface VariableItemProps {
|
||||
export interface VariableItemProps {
|
||||
variableData: IDashboardVariable;
|
||||
existingVariables: Record<string, IDashboardVariable>;
|
||||
onValueUpdate: (
|
||||
@@ -43,488 +24,49 @@ interface VariableItemProps {
|
||||
dependencyData: IDependencyData | null;
|
||||
}
|
||||
|
||||
export const getSelectValue = (
|
||||
selectedValue: IDashboardVariable['selectedValue'],
|
||||
variableData: IDashboardVariable,
|
||||
): string | string[] | undefined => {
|
||||
if (Array.isArray(selectedValue)) {
|
||||
if (!variableData.multiSelect && selectedValue.length === 1) {
|
||||
return selectedValue[0]?.toString();
|
||||
}
|
||||
return selectedValue.map((item) => item.toString());
|
||||
}
|
||||
return selectedValue?.toString();
|
||||
};
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
function VariableItem({
|
||||
variableData,
|
||||
existingVariables,
|
||||
onValueUpdate,
|
||||
existingVariables,
|
||||
variablesToGetUpdated,
|
||||
setVariablesToGetUpdated,
|
||||
dependencyData,
|
||||
}: VariableItemProps): JSX.Element {
|
||||
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
|
||||
[],
|
||||
);
|
||||
const [tempSelection, setTempSelection] = useState<
|
||||
string | string[] | undefined
|
||||
>(undefined);
|
||||
|
||||
// Local state for textbox input to ensure smooth editing experience
|
||||
const [textboxInputValue, setTextboxInputValue] = useState<string>(
|
||||
(variableData.selectedValue?.toString() ||
|
||||
variableData.defaultValue?.toString()) ??
|
||||
'',
|
||||
);
|
||||
const [isTextboxFocused, setIsTextboxFocused] = useState<boolean>(false);
|
||||
const textboxInputRef = useRef<InputRef>(null);
|
||||
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
|
||||
const validVariableUpdate = (): boolean => {
|
||||
if (!variableData.name) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// variableData.name is present as the top element or next in the queue - variablesToGetUpdated
|
||||
return Boolean(
|
||||
variablesToGetUpdated.length &&
|
||||
variablesToGetUpdated[0] === variableData.name,
|
||||
);
|
||||
};
|
||||
|
||||
const [errorMessage, setErrorMessage] = useState<null | string>(null);
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
const getOptions = (variablesRes: VariableResponseProps | null): void => {
|
||||
if (variablesRes && variableData.type === 'QUERY') {
|
||||
try {
|
||||
setErrorMessage(null);
|
||||
|
||||
if (
|
||||
variablesRes?.variableValues &&
|
||||
Array.isArray(variablesRes?.variableValues)
|
||||
) {
|
||||
const newOptionsData = sortValues(
|
||||
variablesRes?.variableValues,
|
||||
variableData.sort,
|
||||
);
|
||||
|
||||
const oldOptionsData = sortValues(optionsData, variableData.sort) as never;
|
||||
|
||||
if (!areArraysEqual(newOptionsData, oldOptionsData)) {
|
||||
/* eslint-disable no-useless-escape */
|
||||
|
||||
let valueNotInList = false;
|
||||
|
||||
if (isArray(variableData.selectedValue)) {
|
||||
variableData.selectedValue.forEach((val) => {
|
||||
const isUsed = newOptionsData.includes(val);
|
||||
|
||||
if (!isUsed) {
|
||||
valueNotInList = true;
|
||||
}
|
||||
});
|
||||
} else if (isString(variableData.selectedValue)) {
|
||||
const isUsed = newOptionsData.includes(variableData.selectedValue);
|
||||
|
||||
if (!isUsed) {
|
||||
valueNotInList = true;
|
||||
}
|
||||
}
|
||||
|
||||
// variablesData.allSelected is added for the case where on change of options we need to update the
|
||||
// local storage
|
||||
if (
|
||||
variableData.type === 'QUERY' &&
|
||||
variableData.name &&
|
||||
(validVariableUpdate() || valueNotInList || variableData.allSelected)
|
||||
) {
|
||||
if (
|
||||
variableData.allSelected &&
|
||||
variableData.multiSelect &&
|
||||
variableData.showALLOption
|
||||
) {
|
||||
onValueUpdate(variableData.name, variableData.id, newOptionsData, true);
|
||||
|
||||
// Update tempSelection to maintain ALL state when dropdown is open
|
||||
if (tempSelection !== undefined) {
|
||||
setTempSelection(newOptionsData.map((option) => option.toString()));
|
||||
}
|
||||
} else {
|
||||
const value = variableData.selectedValue;
|
||||
let allSelected = false;
|
||||
|
||||
if (variableData.multiSelect) {
|
||||
const { selectedValue } = variableData;
|
||||
allSelected =
|
||||
newOptionsData.length > 0 &&
|
||||
Array.isArray(selectedValue) &&
|
||||
newOptionsData.every((option) => selectedValue.includes(option));
|
||||
}
|
||||
|
||||
if (variableData && variableData?.name && variableData?.id) {
|
||||
onValueUpdate(variableData.name, variableData.id, value, allSelected);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
setOptionsData(newOptionsData);
|
||||
} else {
|
||||
setVariablesToGetUpdated((prev) =>
|
||||
prev.filter((name) => name !== variableData.name),
|
||||
);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
}
|
||||
} else if (variableData.type === 'CUSTOM') {
|
||||
const optionsData = sortValues(
|
||||
commaValuesParser(variableData.customValue || ''),
|
||||
variableData.sort,
|
||||
) as never;
|
||||
|
||||
setOptionsData(optionsData);
|
||||
}
|
||||
};
|
||||
|
||||
const { isLoading, refetch } = useQuery(
|
||||
[
|
||||
REACT_QUERY_KEY.DASHBOARD_BY_ID,
|
||||
variableData.name || '',
|
||||
`${minTime}`,
|
||||
`${maxTime}`,
|
||||
JSON.stringify(dependencyData?.order),
|
||||
],
|
||||
{
|
||||
enabled:
|
||||
variableData &&
|
||||
variableData.type === 'QUERY' &&
|
||||
checkAPIInvocation(
|
||||
variablesToGetUpdated,
|
||||
variableData,
|
||||
dependencyData?.parentDependencyGraph,
|
||||
),
|
||||
queryFn: () =>
|
||||
dashboardVariablesQuery({
|
||||
query: variableData.queryValue || '',
|
||||
variables: variablePropsToPayloadVariables(existingVariables),
|
||||
}),
|
||||
refetchOnWindowFocus: false,
|
||||
onSuccess: (response) => {
|
||||
getOptions(response.payload);
|
||||
setVariablesToGetUpdated((prev) =>
|
||||
prev.filter((v) => v !== variableData.name),
|
||||
);
|
||||
},
|
||||
onError: (error: {
|
||||
details: {
|
||||
error: string;
|
||||
};
|
||||
}) => {
|
||||
const { details } = error;
|
||||
|
||||
if (details.error) {
|
||||
let message = details.error;
|
||||
if ((details.error ?? '').toString().includes('Syntax error:')) {
|
||||
message =
|
||||
'Please make sure query is valid and dependent variables are selected';
|
||||
}
|
||||
setErrorMessage(message);
|
||||
}
|
||||
setVariablesToGetUpdated((prev) =>
|
||||
prev.filter((v) => v !== variableData.name),
|
||||
);
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
const handleChange = useCallback(
|
||||
(inputValue: string | string[]): void => {
|
||||
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
|
||||
|
||||
if (
|
||||
value === variableData.selectedValue ||
|
||||
(Array.isArray(value) &&
|
||||
Array.isArray(variableData.selectedValue) &&
|
||||
areArraysEqual(value, variableData.selectedValue))
|
||||
) {
|
||||
return;
|
||||
}
|
||||
if (variableData.name) {
|
||||
// Check if ALL is effectively selected by comparing with available options
|
||||
const isAllSelected =
|
||||
Array.isArray(value) &&
|
||||
value.length > 0 &&
|
||||
optionsData.every((option) => value.includes(option.toString()));
|
||||
|
||||
if (isAllSelected && variableData.showALLOption) {
|
||||
// For ALL selection, pass null to avoid storing values
|
||||
onValueUpdate(variableData.name, variableData.id, optionsData, true);
|
||||
} else {
|
||||
onValueUpdate(variableData.name, variableData.id, value, false);
|
||||
}
|
||||
}
|
||||
},
|
||||
[
|
||||
variableData.multiSelect,
|
||||
variableData.selectedValue,
|
||||
variableData.name,
|
||||
variableData.id,
|
||||
onValueUpdate,
|
||||
optionsData,
|
||||
variableData.showALLOption,
|
||||
],
|
||||
);
|
||||
|
||||
// Add a handler for tracking temporary selection changes
|
||||
const handleTempChange = (inputValue: string | string[]): void => {
|
||||
// Store the selection in temporary state while dropdown is open
|
||||
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
|
||||
setTempSelection(value);
|
||||
};
|
||||
|
||||
// Handle dropdown visibility changes
|
||||
const handleDropdownVisibleChange = (visible: boolean): void => {
|
||||
// Initialize temp selection when opening dropdown
|
||||
if (visible) {
|
||||
setTempSelection(getSelectValue(variableData.selectedValue, variableData));
|
||||
}
|
||||
// Apply changes when closing dropdown
|
||||
else if (!visible && tempSelection !== undefined) {
|
||||
// Call handleChange with the temporarily stored selection
|
||||
handleChange(tempSelection);
|
||||
setTempSelection(undefined);
|
||||
}
|
||||
};
|
||||
|
||||
// do not debounce the above function as we do not need debounce in select variables
|
||||
const debouncedHandleChange = debounce(handleChange, 500);
|
||||
|
||||
const { selectedValue } = variableData;
|
||||
const selectedValueStringified = useMemo(
|
||||
() => getSelectValue(selectedValue, variableData),
|
||||
[selectedValue, variableData],
|
||||
);
|
||||
|
||||
const enableSelectAll = variableData.multiSelect && variableData.showALLOption;
|
||||
|
||||
const selectValue =
|
||||
variableData.allSelected && enableSelectAll
|
||||
? 'ALL'
|
||||
: selectedValueStringified;
|
||||
|
||||
// Apply default value on first render if no selection exists
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
const finalSelectedValues = useMemo(() => {
|
||||
if (variableData.multiSelect) {
|
||||
let value = tempSelection || selectedValue;
|
||||
if (isEmpty(value)) {
|
||||
if (variableData.showALLOption) {
|
||||
if (variableData.defaultValue) {
|
||||
value = variableData.defaultValue;
|
||||
} else {
|
||||
value = optionsData;
|
||||
}
|
||||
} else if (variableData.defaultValue) {
|
||||
value = variableData.defaultValue;
|
||||
} else {
|
||||
value = optionsData?.[0];
|
||||
}
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
if (isEmpty(selectedValue)) {
|
||||
if (variableData.defaultValue) {
|
||||
return variableData.defaultValue;
|
||||
}
|
||||
return optionsData[0]?.toString();
|
||||
}
|
||||
|
||||
return selectedValue;
|
||||
}, [
|
||||
variableData.multiSelect,
|
||||
variableData.showALLOption,
|
||||
variableData.defaultValue,
|
||||
selectedValue,
|
||||
tempSelection,
|
||||
optionsData,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
if (
|
||||
(variableData.multiSelect && !(tempSelection || selectValue)) ||
|
||||
isEmpty(selectValue)
|
||||
) {
|
||||
handleChange(finalSelectedValues as string[] | string);
|
||||
}
|
||||
}, [
|
||||
finalSelectedValues,
|
||||
handleChange,
|
||||
selectValue,
|
||||
tempSelection,
|
||||
variableData.multiSelect,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
// Fetch options for CUSTOM Type
|
||||
if (variableData.type === 'CUSTOM') {
|
||||
getOptions(null);
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [variableData.type, variableData.customValue]);
|
||||
const { name, description, type: variableType } = variableData;
|
||||
|
||||
return (
|
||||
<div className={`variable-item${isTextboxFocused ? ' focused' : ''}`}>
|
||||
<div className="variable-item">
|
||||
<Typography.Text className="variable-name" ellipsis>
|
||||
${variableData.name}
|
||||
{variableData.description && (
|
||||
<Tooltip title={variableData.description}>
|
||||
${name}
|
||||
{description && (
|
||||
<Tooltip title={description}>
|
||||
<InfoCircleOutlined className="info-icon" />
|
||||
</Tooltip>
|
||||
)}
|
||||
</Typography.Text>
|
||||
|
||||
<div className="variable-value">
|
||||
{variableData.type === 'TEXTBOX' ? (
|
||||
<Input
|
||||
ref={textboxInputRef}
|
||||
placeholder="Enter value"
|
||||
data-testid={`variable-textbox-${variableData.id}`}
|
||||
bordered={false}
|
||||
value={textboxInputValue}
|
||||
title={textboxInputValue}
|
||||
onChange={(e): void => {
|
||||
setTextboxInputValue(e.target.value);
|
||||
}}
|
||||
onFocus={(): void => {
|
||||
setIsTextboxFocused(true);
|
||||
}}
|
||||
onBlur={(e): void => {
|
||||
setIsTextboxFocused(false);
|
||||
const value = e.target.value.trim();
|
||||
// If empty, reset to default value
|
||||
if (!value && variableData.defaultValue) {
|
||||
setTextboxInputValue(variableData.defaultValue.toString());
|
||||
debouncedHandleChange(variableData.defaultValue.toString());
|
||||
} else {
|
||||
debouncedHandleChange(value);
|
||||
}
|
||||
}}
|
||||
onKeyDown={(e): void => {
|
||||
if (e.key === 'Enter') {
|
||||
const value = textboxInputValue.trim();
|
||||
if (!value && variableData.defaultValue) {
|
||||
setTextboxInputValue(variableData.defaultValue.toString());
|
||||
debouncedHandleChange(variableData.defaultValue.toString());
|
||||
} else {
|
||||
debouncedHandleChange(value);
|
||||
}
|
||||
textboxInputRef.current?.blur();
|
||||
}
|
||||
}}
|
||||
{variableType === 'TEXTBOX' && (
|
||||
<TextboxVariableInput
|
||||
variableData={variableData}
|
||||
onValueUpdate={onValueUpdate}
|
||||
/>
|
||||
) : (
|
||||
optionsData &&
|
||||
(variableData.multiSelect ? (
|
||||
<CustomMultiSelect
|
||||
key={
|
||||
selectValue && Array.isArray(selectValue)
|
||||
? selectValue.join(' ')
|
||||
: selectValue || variableData.id
|
||||
}
|
||||
options={optionsData.map((option) => ({
|
||||
label: option.toString(),
|
||||
value: option.toString(),
|
||||
}))}
|
||||
defaultValue={variableData.defaultValue || selectValue}
|
||||
onChange={handleTempChange}
|
||||
bordered={false}
|
||||
placeholder="Select value"
|
||||
placement="bottomLeft"
|
||||
style={SelectItemStyle}
|
||||
loading={isLoading}
|
||||
showSearch
|
||||
data-testid="variable-select"
|
||||
className="variable-select"
|
||||
popupClassName="dropdown-styles"
|
||||
maxTagCount={2}
|
||||
getPopupContainer={popupContainer}
|
||||
value={tempSelection || selectValue}
|
||||
onDropdownVisibleChange={handleDropdownVisibleChange}
|
||||
errorMessage={errorMessage}
|
||||
// eslint-disable-next-line react/no-unstable-nested-components
|
||||
maxTagPlaceholder={(omittedValues): JSX.Element => {
|
||||
const maxDisplayValues = 10;
|
||||
const valuesToShow = omittedValues.slice(0, maxDisplayValues);
|
||||
const hasMore = omittedValues.length > maxDisplayValues;
|
||||
const tooltipText =
|
||||
valuesToShow.map(({ value }) => value).join(', ') +
|
||||
(hasMore ? ` + ${omittedValues.length - maxDisplayValues} more` : '');
|
||||
|
||||
return (
|
||||
<Tooltip title={tooltipText}>
|
||||
<span>+ {omittedValues.length} </span>
|
||||
</Tooltip>
|
||||
);
|
||||
}}
|
||||
onClear={(): void => {
|
||||
handleChange([]);
|
||||
}}
|
||||
enableAllSelection={enableSelectAll}
|
||||
maxTagTextLength={30}
|
||||
allowClear={selectValue !== ALL_SELECT_VALUE && selectValue !== 'ALL'}
|
||||
onRetry={(): void => {
|
||||
setErrorMessage(null);
|
||||
refetch();
|
||||
}}
|
||||
/>
|
||||
) : (
|
||||
<CustomSelect
|
||||
key={
|
||||
selectValue && Array.isArray(selectValue)
|
||||
? selectValue.join(' ')
|
||||
: selectValue || variableData.id
|
||||
}
|
||||
defaultValue={variableData.defaultValue || selectValue}
|
||||
onChange={handleChange}
|
||||
bordered={false}
|
||||
placeholder="Select value"
|
||||
style={SelectItemStyle}
|
||||
loading={isLoading}
|
||||
showSearch
|
||||
data-testid="variable-select"
|
||||
className="variable-select"
|
||||
popupClassName="dropdown-styles"
|
||||
getPopupContainer={popupContainer}
|
||||
options={optionsData.map((option) => ({
|
||||
label: option.toString(),
|
||||
value: option.toString(),
|
||||
}))}
|
||||
value={selectValue}
|
||||
errorMessage={errorMessage}
|
||||
onRetry={(): void => {
|
||||
setErrorMessage(null);
|
||||
refetch();
|
||||
}}
|
||||
/>
|
||||
))
|
||||
)}
|
||||
{variableData.type !== 'TEXTBOX' && errorMessage && (
|
||||
<span style={{ margin: '0 0.5rem' }}>
|
||||
<Popover
|
||||
placement="top"
|
||||
content={<Typography>{errorMessage}</Typography>}
|
||||
>
|
||||
<WarningOutlined style={{ color: orange[5] }} />
|
||||
</Popover>
|
||||
</span>
|
||||
{variableType === 'CUSTOM' && (
|
||||
<CustomVariableInput
|
||||
variableData={variableData}
|
||||
onValueUpdate={onValueUpdate}
|
||||
/>
|
||||
)}
|
||||
{variableType === 'QUERY' && (
|
||||
<QueryVariableInput
|
||||
variableData={variableData}
|
||||
onValueUpdate={onValueUpdate}
|
||||
existingVariables={existingVariables}
|
||||
variablesToGetUpdated={variablesToGetUpdated}
|
||||
setVariablesToGetUpdated={setVariablesToGetUpdated}
|
||||
dependencyData={dependencyData}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -0,0 +1,201 @@
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react';
|
||||
import { isEmpty } from 'lodash-es';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
|
||||
import { areArraysEqual, getSelectValue } from './util';
|
||||
|
||||
interface UseDashboardVariableSelectHelperParams {
|
||||
variableData: IDashboardVariable;
|
||||
optionsData: (string | number | boolean)[];
|
||||
onValueUpdate: (
|
||||
name: string,
|
||||
id: string,
|
||||
value: IDashboardVariable['selectedValue'],
|
||||
allSelected: boolean,
|
||||
) => void;
|
||||
}
|
||||
|
||||
interface UseDashboardVariableSelectHelperReturn {
|
||||
// State
|
||||
tempSelection: string | string[] | undefined;
|
||||
setTempSelection: React.Dispatch<
|
||||
React.SetStateAction<string | string[] | undefined>
|
||||
>;
|
||||
value: string | string[] | undefined;
|
||||
defaultValue: string | string[] | undefined;
|
||||
|
||||
// Derived values
|
||||
enableSelectAll: boolean;
|
||||
|
||||
// Handlers
|
||||
onChange: (value: string | string[]) => void;
|
||||
onDropdownVisibleChange: (visible: boolean) => void;
|
||||
handleClear: () => void;
|
||||
}
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
export function useDashboardVariableSelectHelper({
|
||||
variableData,
|
||||
optionsData,
|
||||
onValueUpdate,
|
||||
}: UseDashboardVariableSelectHelperParams): UseDashboardVariableSelectHelperReturn {
|
||||
const { selectedValue } = variableData;
|
||||
|
||||
const [tempSelection, setTempSelection] = useState<
|
||||
string | string[] | undefined
|
||||
>(undefined);
|
||||
|
||||
const selectedValueStringified = useMemo(
|
||||
() => getSelectValue(selectedValue, variableData),
|
||||
[selectedValue, variableData],
|
||||
);
|
||||
|
||||
const enableSelectAll = variableData.multiSelect && variableData.showALLOption;
|
||||
|
||||
const selectValue =
|
||||
variableData.allSelected && enableSelectAll
|
||||
? 'ALL'
|
||||
: selectedValueStringified;
|
||||
|
||||
const handleChange = useCallback(
|
||||
(inputValue: string | string[]): void => {
|
||||
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
|
||||
|
||||
if (
|
||||
value === variableData.selectedValue ||
|
||||
(Array.isArray(value) &&
|
||||
Array.isArray(variableData.selectedValue) &&
|
||||
areArraysEqual(value, variableData.selectedValue))
|
||||
) {
|
||||
return;
|
||||
}
|
||||
if (variableData.name) {
|
||||
// Check if ALL is effectively selected by comparing with available options
|
||||
const isAllSelected =
|
||||
Array.isArray(value) &&
|
||||
value.length > 0 &&
|
||||
optionsData.every((option) => value.includes(option.toString()));
|
||||
|
||||
if (isAllSelected && variableData.showALLOption) {
|
||||
// For ALL selection, pass optionsData as the value and set allSelected to true
|
||||
onValueUpdate(variableData.name, variableData.id, optionsData, true);
|
||||
} else {
|
||||
onValueUpdate(variableData.name, variableData.id, value, false);
|
||||
}
|
||||
}
|
||||
},
|
||||
[
|
||||
variableData.multiSelect,
|
||||
variableData.selectedValue,
|
||||
variableData.name,
|
||||
variableData.id,
|
||||
variableData.showALLOption,
|
||||
onValueUpdate,
|
||||
optionsData,
|
||||
],
|
||||
);
|
||||
|
||||
const handleTempChange = useCallback(
|
||||
(inputValue: string | string[]): void => {
|
||||
// Store the selection in temporary state while dropdown is open
|
||||
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
|
||||
setTempSelection(value);
|
||||
},
|
||||
[variableData.multiSelect],
|
||||
);
|
||||
|
||||
// Apply default value on first render if no selection exists
|
||||
const finalSelectedValues = useMemo(() => {
|
||||
if (variableData.multiSelect) {
|
||||
let value = tempSelection || selectedValue;
|
||||
if (isEmpty(value)) {
|
||||
if (variableData.showALLOption) {
|
||||
if (variableData.defaultValue) {
|
||||
value = variableData.defaultValue;
|
||||
} else {
|
||||
value = optionsData;
|
||||
}
|
||||
} else if (variableData.defaultValue) {
|
||||
value = variableData.defaultValue;
|
||||
} else {
|
||||
value = optionsData?.[0];
|
||||
}
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
if (isEmpty(selectedValue)) {
|
||||
if (variableData.defaultValue) {
|
||||
return variableData.defaultValue;
|
||||
}
|
||||
return optionsData[0]?.toString();
|
||||
}
|
||||
|
||||
return selectedValue;
|
||||
}, [
|
||||
variableData.multiSelect,
|
||||
variableData.showALLOption,
|
||||
variableData.defaultValue,
|
||||
selectedValue,
|
||||
tempSelection,
|
||||
optionsData,
|
||||
]);
|
||||
|
||||
// Apply default values when needed
|
||||
useEffect(() => {
|
||||
if (
|
||||
(variableData.multiSelect && !(tempSelection || selectValue)) ||
|
||||
isEmpty(selectValue)
|
||||
) {
|
||||
handleChange(finalSelectedValues as string[] | string);
|
||||
}
|
||||
}, [
|
||||
finalSelectedValues,
|
||||
handleChange,
|
||||
selectValue,
|
||||
tempSelection,
|
||||
variableData.multiSelect,
|
||||
]);
|
||||
|
||||
// Handle dropdown visibility changes
|
||||
const onDropdownVisibleChange = useCallback(
|
||||
(visible: boolean): void => {
|
||||
// Initialize temp selection when opening dropdown
|
||||
if (visible) {
|
||||
setTempSelection(getSelectValue(variableData.selectedValue, variableData));
|
||||
}
|
||||
// Apply changes when closing dropdown
|
||||
else if (!visible && tempSelection !== undefined) {
|
||||
// Call handleChange with the temporarily stored selection
|
||||
handleChange(tempSelection);
|
||||
setTempSelection(undefined);
|
||||
}
|
||||
},
|
||||
[variableData, tempSelection, handleChange],
|
||||
);
|
||||
|
||||
const handleClear = useCallback((): void => {
|
||||
handleChange([]);
|
||||
}, [handleChange]);
|
||||
|
||||
const value = variableData.multiSelect
|
||||
? tempSelection || selectValue
|
||||
: selectValue;
|
||||
|
||||
const defaultValue = variableData.defaultValue || selectValue;
|
||||
|
||||
const onChange = useMemo(() => {
|
||||
return variableData.multiSelect ? handleTempChange : handleChange;
|
||||
}, [variableData.multiSelect, handleTempChange, handleChange]);
|
||||
|
||||
return {
|
||||
tempSelection,
|
||||
setTempSelection,
|
||||
enableSelectAll,
|
||||
onDropdownVisibleChange,
|
||||
handleClear,
|
||||
value,
|
||||
defaultValue,
|
||||
onChange,
|
||||
};
}
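A usage sketch (illustrative, not part of this change) of how the hook is expected to be wired into a variable select; the host component is hypothetical, but the hook's return values map directly onto AntD Select props:

import { Select } from 'antd';

// Hypothetical host component; variableData, optionsData and onValueUpdate
// come from the dashboard variables store in the real code.
function VariableSelect({
	variableData,
	optionsData,
	onValueUpdate,
}: UseDashboardVariableSelectHelperParams): JSX.Element {
	const {
		value,
		defaultValue,
		onChange,
		onDropdownVisibleChange,
		handleClear,
	} = useDashboardVariableSelectHelper({ variableData, optionsData, onValueUpdate });

	return (
		<Select
			mode={variableData.multiSelect ? 'multiple' : undefined}
			value={value}
			defaultValue={defaultValue}
			onChange={onChange}
			onDropdownVisibleChange={onDropdownVisibleChange}
			onClear={handleClear}
			allowClear
			options={optionsData.map((option) => ({
				label: String(option),
				value: String(option),
			}))}
		/>
	);
}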
@@ -381,3 +381,16 @@ export const uniqueValues = (values: string[] | string): string[] | string => {

	return values;
};

export const getSelectValue = (
	selectedValue: IDashboardVariable['selectedValue'],
	variableData: IDashboardVariable,
): string | string[] | undefined => {
	if (Array.isArray(selectedValue)) {
		if (!variableData.multiSelect && selectedValue.length === 1) {
			return selectedValue[0]?.toString();
		}
		return selectedValue.map((item) => item.toString());
	}
	return selectedValue?.toString();
};
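For context, a minimal sketch (not part of the diff) of how getSelectValue normalizes the stored selection; the variable objects below are hypothetical stubs:

// Illustrative only — multi-select values stay string arrays, while a
// single-select value stored as a one-element array collapses to a string.
const multiSelectVar = { multiSelect: true } as IDashboardVariable;
const singleSelectVar = { multiSelect: false } as IDashboardVariable;

getSelectValue([200, 404], multiSelectVar); // ['200', '404']
getSelectValue([200], singleSelectVar); // '200'
getSelectValue('frontend', singleSelectVar); // 'frontend'
getSelectValue(undefined, singleSelectVar); // undefined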
|
||||
@@ -0,0 +1,21 @@
.chart-manager-container {
	width: 100%;
	max-height: calc(40% - 40px);
	display: flex;
	flex-direction: column;
	gap: 16px;

	.chart-manager-header {
		display: flex;
		justify-content: space-between;
		align-items: center;
		gap: 16px;

		.chart-manager-actions-container {
			display: flex;
			justify-content: flex-end;
			align-items: center;
			gap: 8px;
		}
	}
}
@@ -0,0 +1,147 @@
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react';
|
||||
import { Button, Input } from 'antd';
|
||||
import { ResizeTable } from 'components/ResizeTable';
|
||||
import { getGraphManagerTableColumns } from 'container/GridCardLayout/GridCard/FullView/TableRender/GraphManagerColumns';
|
||||
import { ExtendedChartDataset } from 'container/GridCardLayout/GridCard/FullView/types';
|
||||
import { getDefaultTableDataSet } from 'container/GridCardLayout/GridCard/FullView/utils';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
|
||||
import { usePlotContext } from 'lib/uPlotV2/context/PlotContext';
|
||||
import useLegendsSync from 'lib/uPlotV2/hooks/useLegendsSync';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
|
||||
import './ChartManager.styles.scss';
|
||||
|
||||
interface ChartManagerProps {
|
||||
config: UPlotConfigBuilder;
|
||||
alignedData: uPlot.AlignedData;
|
||||
yAxisUnit?: string;
|
||||
onCancel?: () => void;
|
||||
}
|
||||
|
||||
/**
|
||||
* ChartManager provides a tabular view to manage the visibility of
|
||||
* individual series on a uPlot chart.
|
||||
*
|
||||
* It syncs with the legend state coming from the plot context and
|
||||
* allows users to:
|
||||
* - filter series by label
|
||||
* - toggle individual series on/off
|
||||
* - persist the visibility configuration to local storage.
|
||||
*
|
||||
* @param config - `UPlotConfigBuilder` instance used to derive chart options.
|
||||
* @param alignedData - uPlot aligned data used to build the initial table dataset.
|
||||
* @param yAxisUnit - Optional unit label for Y-axis values shown in the table.
|
||||
* @param onCancel - Optional callback invoked when the user cancels the dialog.
|
||||
*/
|
||||
export default function ChartManager({
|
||||
config,
|
||||
alignedData,
|
||||
yAxisUnit,
|
||||
onCancel,
|
||||
}: ChartManagerProps): JSX.Element {
|
||||
const { notifications } = useNotifications();
|
||||
const { legendItemsMap } = useLegendsSync({
|
||||
config,
|
||||
subscribeToFocusChange: false,
|
||||
});
|
||||
const {
|
||||
onToggleSeriesOnOff,
|
||||
onToggleSeriesVisibility,
|
||||
syncSeriesVisibilityToLocalStorage,
|
||||
} = usePlotContext();
|
||||
const { isDashboardLocked } = useDashboard();
|
||||
|
||||
const [tableDataSet, setTableDataSet] = useState<ExtendedChartDataset[]>(() =>
|
||||
getDefaultTableDataSet(config.getConfig() as uPlot.Options, alignedData),
|
||||
);
|
||||
|
||||
const graphVisibilityState = useMemo(
|
||||
() =>
|
||||
Object.entries(legendItemsMap).reduce<boolean[]>((acc, [key, item]) => {
|
||||
acc[Number(key)] = item.show;
|
||||
return acc;
|
||||
}, []),
|
||||
[legendItemsMap],
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
setTableDataSet(
|
||||
getDefaultTableDataSet(config.getConfig() as uPlot.Options, alignedData),
|
||||
);
|
||||
}, [alignedData, config]);
|
||||
|
||||
const filterHandler = useCallback(
|
||||
(event: React.ChangeEvent<HTMLInputElement>): void => {
|
||||
const value = event.target.value.toString().toLowerCase();
|
||||
const updatedDataSet = tableDataSet.map((item) => {
|
||||
if (item.label?.toLocaleLowerCase().includes(value)) {
|
||||
return { ...item, show: true };
|
||||
}
|
||||
return { ...item, show: false };
|
||||
});
|
||||
setTableDataSet(updatedDataSet);
|
||||
},
|
||||
[tableDataSet],
|
||||
);
|
||||
|
||||
const dataSource = useMemo(
|
||||
() =>
|
||||
tableDataSet.filter(
|
||||
(item, index) => index !== 0 && item.show, // skipping the first item as it is the x-axis
|
||||
),
|
||||
[tableDataSet],
|
||||
);
|
||||
|
||||
const columns = useMemo(
|
||||
() =>
|
||||
getGraphManagerTableColumns({
|
||||
tableDataSet,
|
||||
checkBoxOnChangeHandler: (_e, index) => {
|
||||
onToggleSeriesOnOff(index);
|
||||
},
|
||||
graphVisibilityState,
|
||||
labelClickedHandler: onToggleSeriesVisibility,
|
||||
yAxisUnit,
|
||||
isGraphDisabled: isDashboardLocked,
|
||||
}),
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[tableDataSet, graphVisibilityState, yAxisUnit, isDashboardLocked],
|
||||
);
|
||||
|
||||
const handleSave = useCallback((): void => {
|
||||
syncSeriesVisibilityToLocalStorage();
|
||||
notifications.success({
|
||||
message: 'The updated graphs & legends are saved',
|
||||
});
|
||||
if (onCancel) {
|
||||
onCancel();
|
||||
}
|
||||
}, [syncSeriesVisibilityToLocalStorage, notifications, onCancel]);
|
||||
|
||||
return (
|
||||
<div className="chart-manager-container">
|
||||
<div className="chart-manager-header">
|
||||
<Input onChange={filterHandler} placeholder="Filter Series" />
|
||||
<div className="chart-manager-actions-container">
|
||||
<Button type="default" onClick={onCancel}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button type="primary" onClick={handleSave}>
|
||||
Save
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
<div className="chart-manager-table-container">
|
||||
<ResizeTable
|
||||
columns={columns}
|
||||
dataSource={dataSource}
|
||||
virtual
|
||||
rowKey="index"
|
||||
scroll={{ y: 200 }}
|
||||
pagination={false}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,120 @@
|
||||
import { useCallback } from 'react';
|
||||
import { UseQueryResult } from 'react-query';
|
||||
import {
|
||||
getTimeRangeFromStepInterval,
|
||||
isApmMetric,
|
||||
} from 'container/PanelWrapper/utils';
|
||||
import { getUplotClickData } from 'container/QueryTable/Drilldown/drilldownUtils';
|
||||
import useGraphContextMenu from 'container/QueryTable/Drilldown/useGraphContextMenu';
|
||||
import {
|
||||
PopoverPosition,
|
||||
useCoordinates,
|
||||
} from 'periscope/components/ContextMenu';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
interface UseTimeSeriesContextMenuParams {
|
||||
widget: Widgets;
|
||||
queryResponse: UseQueryResult<
|
||||
SuccessResponse<MetricRangePayloadProps, unknown>,
|
||||
Error
|
||||
>;
|
||||
}
|
||||
|
||||
export const usePanelContextMenu = ({
|
||||
widget,
|
||||
queryResponse,
|
||||
}: UseTimeSeriesContextMenuParams): {
|
||||
coordinates: { x: number; y: number } | null;
|
||||
popoverPosition: PopoverPosition | null;
|
||||
onClose: () => void;
|
||||
menuItemsConfig: {
|
||||
header?: string | React.ReactNode;
|
||||
items?: React.ReactNode;
|
||||
};
|
||||
clickHandlerWithContextMenu: (...args: any[]) => void;
|
||||
} => {
|
||||
const {
|
||||
coordinates,
|
||||
popoverPosition,
|
||||
clickedData,
|
||||
onClose,
|
||||
subMenu,
|
||||
onClick,
|
||||
setSubMenu,
|
||||
} = useCoordinates();
|
||||
|
||||
const { menuItemsConfig } = useGraphContextMenu({
|
||||
widgetId: widget.id || '',
|
||||
query: widget.query,
|
||||
graphData: clickedData,
|
||||
onClose,
|
||||
coordinates,
|
||||
subMenu,
|
||||
setSubMenu,
|
||||
contextLinks: widget.contextLinks,
|
||||
panelType: widget.panelTypes,
|
||||
queryRange: queryResponse,
|
||||
});
|
||||
|
||||
const clickHandlerWithContextMenu = useCallback(
|
||||
(...args: any[]) => {
|
||||
const [
|
||||
xValue,
|
||||
_yvalue,
|
||||
_mouseX,
|
||||
_mouseY,
|
||||
metric,
|
||||
queryData,
|
||||
absoluteMouseX,
|
||||
absoluteMouseY,
|
||||
axesData,
|
||||
focusedSeries,
|
||||
] = args;
|
||||
|
||||
const data = getUplotClickData({
|
||||
metric,
|
||||
queryData,
|
||||
absoluteMouseX,
|
||||
absoluteMouseY,
|
||||
focusedSeries,
|
||||
});
|
||||
|
||||
let timeRange;
|
||||
|
||||
if (axesData && queryData?.queryName) {
|
||||
const compositeQuery = (queryResponse?.data?.params as any)?.compositeQuery;
|
||||
|
||||
if (compositeQuery?.queries) {
|
||||
const specificQuery = compositeQuery.queries.find(
|
||||
(query: any) => query.spec?.name === queryData.queryName,
|
||||
);
|
||||
|
||||
const stepInterval = specificQuery?.spec?.stepInterval || 60;
|
||||
|
||||
timeRange = getTimeRangeFromStepInterval(
|
||||
stepInterval,
|
||||
metric?.clickedTimestamp || xValue,
|
||||
specificQuery?.spec?.signal === DataSource.METRICS &&
|
||||
isApmMetric(specificQuery?.spec?.aggregations[0]?.metricName),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (data && data?.record?.queryName) {
|
||||
onClick(data.coord, { ...data.record, label: data.label, timeRange });
|
||||
}
|
||||
},
|
||||
[onClick, queryResponse],
|
||||
);
|
||||
|
||||
return {
|
||||
coordinates,
|
||||
popoverPosition,
|
||||
onClose,
|
||||
menuItemsConfig,
|
||||
clickHandlerWithContextMenu,
|
||||
};
|
||||
};
|
||||
@@ -0,0 +1,4 @@
.panel-container {
	height: 100%;
	width: 100%;
}
@@ -0,0 +1,176 @@
|
||||
import { useEffect, useMemo, useRef, useState } from 'react';
|
||||
import TimeSeries from 'container/DashboardContainer/visualization/charts/TimeSeries/TimeSeries';
|
||||
import ChartManager from 'container/DashboardContainer/visualization/components/ChartManager/ChartManager';
|
||||
import { usePanelContextMenu } from 'container/DashboardContainer/visualization/hooks/usePanelContextMenu';
|
||||
import { PanelWrapperProps } from 'container/PanelWrapper/panelWrapper.types';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import { useResizeObserver } from 'hooks/useDimensions';
|
||||
import { LegendPosition } from 'lib/uPlotV2/components/types';
|
||||
import { LineInterpolation } from 'lib/uPlotV2/config/types';
|
||||
import { ContextMenu } from 'periscope/components/ContextMenu';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { useTimezone } from 'providers/Timezone';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import uPlot from 'uplot';
|
||||
import { getTimeRange } from 'utils/getTimeRange';
|
||||
|
||||
import { prepareChartData, prepareUPlotConfig } from '../TimeSeriesPanel/utils';
|
||||
|
||||
import '../Panel.styles.scss';
|
||||
|
||||
function TimeSeriesPanel(props: PanelWrapperProps): JSX.Element {
|
||||
const {
|
||||
panelMode,
|
||||
queryResponse,
|
||||
widget,
|
||||
onDragSelect,
|
||||
isFullViewMode,
|
||||
onToggleModelHandler,
|
||||
} = props;
|
||||
const { toScrollWidgetId, setToScrollWidgetId } = useDashboard();
|
||||
const graphRef = useRef<HTMLDivElement>(null);
|
||||
const [minTimeScale, setMinTimeScale] = useState<number>();
|
||||
const [maxTimeScale, setMaxTimeScale] = useState<number>();
|
||||
const containerDimensions = useResizeObserver(graphRef);
|
||||
|
||||
const isDarkMode = useIsDarkMode();
|
||||
const { timezone } = useTimezone();
|
||||
|
||||
useEffect(() => {
|
||||
if (toScrollWidgetId === widget.id) {
|
||||
graphRef.current?.scrollIntoView({
|
||||
behavior: 'smooth',
|
||||
block: 'center',
|
||||
});
|
||||
graphRef.current?.focus();
|
||||
setToScrollWidgetId('');
|
||||
}
|
||||
}, [toScrollWidgetId, setToScrollWidgetId, widget.id]);
|
||||
|
||||
useEffect((): void => {
|
||||
const { startTime, endTime } = getTimeRange(queryResponse);
|
||||
|
||||
setMinTimeScale(startTime);
|
||||
setMaxTimeScale(endTime);
|
||||
}, [queryResponse]);
|
||||
|
||||
const {
|
||||
coordinates,
|
||||
popoverPosition,
|
||||
onClose,
|
||||
menuItemsConfig,
|
||||
clickHandlerWithContextMenu,
|
||||
} = usePanelContextMenu({
|
||||
widget,
|
||||
queryResponse,
|
||||
});
|
||||
|
||||
const chartData = useMemo(() => {
|
||||
if (!queryResponse?.data?.payload) {
|
||||
return [];
|
||||
}
|
||||
return prepareChartData(queryResponse?.data?.payload);
|
||||
}, [queryResponse?.data?.payload]);
|
||||
|
||||
const config = useMemo(() => {
|
||||
const tzDate = (timestamp: number): Date =>
|
||||
uPlot.tzDate(new Date(timestamp * 1e3), timezone.value);
|
||||
|
||||
return prepareUPlotConfig({
|
||||
widgetId: widget.id || '',
|
||||
apiResponse: queryResponse?.data?.payload as MetricRangePayloadProps,
|
||||
tzDate,
|
||||
minTimeScale: minTimeScale,
|
||||
maxTimeScale: maxTimeScale,
|
||||
isLogScale: widget?.isLogScale ?? false,
|
||||
thresholds: {
|
||||
scaleKey: 'y',
|
||||
thresholds: (widget.thresholds || []).map((threshold) => ({
|
||||
thresholdValue: threshold.thresholdValue ?? 0,
|
||||
thresholdColor: threshold.thresholdColor,
|
||||
thresholdUnit: threshold.thresholdUnit,
|
||||
thresholdLabel: threshold.thresholdLabel,
|
||||
})),
|
||||
yAxisUnit: widget.yAxisUnit,
|
||||
},
|
||||
yAxisUnit: widget.yAxisUnit || '',
|
||||
softMin: widget.softMin === undefined ? null : widget.softMin,
|
||||
softMax: widget.softMax === undefined ? null : widget.softMax,
|
||||
spanGaps: false,
|
||||
colorMapping: widget.customLegendColors ?? {},
|
||||
lineInterpolation: LineInterpolation.Spline,
|
||||
isDarkMode,
|
||||
onClick: clickHandlerWithContextMenu,
|
||||
onDragSelect,
|
||||
currentQuery: widget.query,
|
||||
panelMode,
|
||||
});
|
||||
}, [
|
||||
widget.id,
|
||||
maxTimeScale,
|
||||
minTimeScale,
|
||||
timezone.value,
|
||||
widget.customLegendColors,
|
||||
widget.isLogScale,
|
||||
widget.softMax,
|
||||
widget.softMin,
|
||||
isDarkMode,
|
||||
queryResponse?.data?.payload,
|
||||
widget.query,
|
||||
widget.thresholds,
|
||||
widget.yAxisUnit,
|
||||
panelMode,
|
||||
clickHandlerWithContextMenu,
|
||||
onDragSelect,
|
||||
]);
|
||||
|
||||
const layoutChildren = useMemo(() => {
|
||||
if (!isFullViewMode) {
|
||||
return null;
|
||||
}
|
||||
return (
|
||||
<ChartManager
|
||||
config={config}
|
||||
alignedData={chartData}
|
||||
yAxisUnit={widget.yAxisUnit}
|
||||
onCancel={onToggleModelHandler}
|
||||
/>
|
||||
);
|
||||
}, [
|
||||
isFullViewMode,
|
||||
config,
|
||||
chartData,
|
||||
widget.yAxisUnit,
|
||||
onToggleModelHandler,
|
||||
]);
|
||||
|
||||
return (
|
||||
<div className="panel-container" ref={graphRef}>
|
||||
{containerDimensions.width > 0 && containerDimensions.height > 0 && (
|
||||
<TimeSeries
|
||||
config={config}
|
||||
legendConfig={{
|
||||
position: widget?.legendPosition ?? LegendPosition.BOTTOM,
|
||||
}}
|
||||
yAxisUnit={widget.yAxisUnit}
|
||||
decimalPrecision={widget.decimalPrecision}
|
||||
timezone={timezone.value}
|
||||
data={chartData as uPlot.AlignedData}
|
||||
width={containerDimensions.width}
|
||||
height={containerDimensions.height}
|
||||
layoutChildren={layoutChildren}
|
||||
>
|
||||
<ContextMenu
|
||||
coordinates={coordinates}
|
||||
popoverPosition={popoverPosition}
|
||||
title={menuItemsConfig.header as string}
|
||||
items={menuItemsConfig.items}
|
||||
onClose={onClose}
|
||||
/>
|
||||
</TimeSeries>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default TimeSeriesPanel;
|
||||
@@ -0,0 +1,170 @@
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import {
|
||||
fillMissingXAxisTimestamps,
|
||||
getXAxisTimestamps,
|
||||
} from 'container/DashboardContainer/visualization/panels/utils';
|
||||
import { getLegend } from 'lib/dashboard/getQueryResults';
|
||||
import getLabelName from 'lib/getLabelName';
|
||||
import onClickPlugin, {
|
||||
OnClickPluginOpts,
|
||||
} from 'lib/uPlotLib/plugins/onClickPlugin';
|
||||
import {
|
||||
DistributionType,
|
||||
DrawStyle,
|
||||
LineInterpolation,
|
||||
LineStyle,
|
||||
SelectionPreferencesSource,
|
||||
VisibilityMode,
|
||||
} from 'lib/uPlotV2/config/types';
|
||||
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
|
||||
import { ThresholdsDrawHookOptions } from 'lib/uPlotV2/hooks/types';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import { PanelMode } from '../types';
|
||||
|
||||
export const prepareChartData = (
|
||||
apiResponse: MetricRangePayloadProps,
|
||||
): uPlot.AlignedData => {
|
||||
const seriesList = apiResponse?.data?.result || [];
|
||||
const timestampArr = getXAxisTimestamps(seriesList);
|
||||
const yAxisValuesArr = fillMissingXAxisTimestamps(timestampArr, seriesList);
|
||||
|
||||
return [timestampArr, ...yAxisValuesArr];
|
||||
};
|
||||
|
||||
export const prepareUPlotConfig = ({
|
||||
widgetId,
|
||||
apiResponse,
|
||||
tzDate,
|
||||
minTimeScale,
|
||||
maxTimeScale,
|
||||
isLogScale,
|
||||
thresholds,
|
||||
softMin,
|
||||
softMax,
|
||||
spanGaps,
|
||||
colorMapping,
|
||||
lineInterpolation,
|
||||
isDarkMode,
|
||||
currentQuery,
|
||||
onDragSelect,
|
||||
onClick,
|
||||
yAxisUnit,
|
||||
panelMode,
|
||||
}: {
|
||||
widgetId: string;
|
||||
apiResponse: MetricRangePayloadProps;
|
||||
tzDate: uPlot.LocalDateFromUnix;
|
||||
minTimeScale: number | undefined;
|
||||
maxTimeScale: number | undefined;
|
||||
isLogScale: boolean;
|
||||
softMin: number | null;
|
||||
softMax: number | null;
|
||||
spanGaps: boolean;
|
||||
colorMapping: Record<string, string>;
|
||||
lineInterpolation: LineInterpolation;
|
||||
isDarkMode: boolean;
|
||||
thresholds: ThresholdsDrawHookOptions;
|
||||
currentQuery: Query;
|
||||
yAxisUnit: string;
|
||||
onDragSelect: (startTime: number, endTime: number) => void;
|
||||
onClick?: OnClickPluginOpts['onClick'];
|
||||
panelMode: PanelMode;
|
||||
}): UPlotConfigBuilder => {
|
||||
const builder = new UPlotConfigBuilder({
|
||||
onDragSelect,
|
||||
widgetId,
|
||||
tzDate,
|
||||
shouldSaveSelectionPreference: panelMode === PanelMode.DASHBOARD_VIEW,
|
||||
selectionPreferencesSource: [
|
||||
PanelMode.DASHBOARD_VIEW,
|
||||
PanelMode.STANDALONE_VIEW,
|
||||
].includes(panelMode)
|
||||
? SelectionPreferencesSource.LOCAL_STORAGE
|
||||
: SelectionPreferencesSource.IN_MEMORY,
|
||||
});
|
||||
|
||||
// X scale – time axis
|
||||
builder.addScale({
|
||||
scaleKey: 'x',
|
||||
time: true,
|
||||
min: minTimeScale,
|
||||
max: maxTimeScale,
|
||||
logBase: isLogScale ? 10 : undefined,
|
||||
distribution: isLogScale
|
||||
? DistributionType.Logarithmic
|
||||
: DistributionType.Linear,
|
||||
});
|
||||
|
||||
// Y scale – value axis, driven primarily by softMin/softMax and data
|
||||
builder.addScale({
|
||||
scaleKey: 'y',
|
||||
time: false,
|
||||
min: undefined,
|
||||
max: undefined,
|
||||
softMin: softMin ?? undefined,
|
||||
softMax: softMax ?? undefined,
|
||||
thresholds,
|
||||
logBase: isLogScale ? 10 : undefined,
|
||||
distribution: isLogScale
|
||||
? DistributionType.Logarithmic
|
||||
: DistributionType.Linear,
|
||||
});
|
||||
|
||||
builder.addThresholds(thresholds);
|
||||
|
||||
if (typeof onClick === 'function') {
|
||||
builder.addPlugin(
|
||||
onClickPlugin({
|
||||
onClick,
|
||||
apiResponse,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
builder.addAxis({
|
||||
scaleKey: 'x',
|
||||
show: true,
|
||||
side: 2,
|
||||
isDarkMode,
|
||||
isLogScale: false,
|
||||
panelType: PANEL_TYPES.TIME_SERIES,
|
||||
});
|
||||
|
||||
builder.addAxis({
|
||||
scaleKey: 'y',
|
||||
show: true,
|
||||
side: 3,
|
||||
isDarkMode,
|
||||
isLogScale: false,
|
||||
yAxisUnit,
|
||||
panelType: PANEL_TYPES.TIME_SERIES,
|
||||
});
|
||||
|
||||
apiResponse.data?.result?.forEach((series) => {
|
||||
const baseLabelName = getLabelName(
|
||||
series.metric,
|
||||
series.queryName || '', // query
|
||||
series.legend || '',
|
||||
);
|
||||
|
||||
const label = currentQuery
|
||||
? getLegend(series, currentQuery, baseLabelName)
|
||||
: baseLabelName;
|
||||
|
||||
builder.addSeries({
|
||||
scaleKey: 'y',
|
||||
drawStyle: DrawStyle.Line,
|
||||
label: label,
|
||||
colorMapping,
|
||||
spanGaps,
|
||||
lineStyle: LineStyle.Solid,
|
||||
lineInterpolation,
|
||||
showPoints: VisibilityMode.Never,
|
||||
pointSize: 5,
|
||||
isDarkMode,
|
||||
});
|
||||
});
|
||||
return builder;
|
||||
};
|
||||
@@ -0,0 +1,54 @@
import { normalizePlotValue } from 'lib/uPlotV2/utils/dataUtils';
import { QueryData } from 'types/api/widgets/getQuery';

export function getXAxisTimestamps(seriesList: QueryData[]): number[] {
	const timestamps = new Set<number>();

	seriesList.forEach((series: { values?: [number, string][] }) => {
		if (series?.values) {
			series.values.forEach((value) => {
				timestamps.add(value[0]);
			});
		}
	});

	const timestampsArr = Array.from(timestamps);
	timestampsArr.sort((a, b) => a - b);

	return timestampsArr;
}

export function fillMissingXAxisTimestamps(
	timestampArr: number[],
	data: Array<{ values?: [number, string][] }>,
): (number | null)[][] {
	// Ensure we work with a sorted, de-duplicated list of x-axis timestamps
	const canonicalTimestamps = Array.from(new Set(timestampArr)).sort(
		(a, b) => a - b,
	);

	return data.map(({ values }) =>
		buildSeriesYValues(canonicalTimestamps, values),
	);
}

function buildSeriesYValues(
	timestamps: number[],
	values?: [number, string][],
): (number | null)[] {
	if (!values?.length) {
		return [];
	}

	const valueByTimestamp = new Map<number, number | null>();

	for (let i = 0; i < values.length; i++) {
		const [timestamp, rawValue] = values[i];
		valueByTimestamp.set(timestamp, normalizePlotValue(rawValue));
	}

	return timestamps.map((timestamp) => {
		const value = valueByTimestamp.get(timestamp);
		return value !== undefined ? value : null;
	});
}
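A worked example (hypothetical data, assuming normalizePlotValue parses numeric strings to numbers) of how the two helpers align series onto a shared x-axis:

// Two series with partially overlapping timestamps ([unixSeconds, stringValue]).
const seriesA = { values: [[100, '1'], [160, '2']] as [number, string][] };
const seriesB = { values: [[100, '5'], [220, '7']] as [number, string][] };

const xAxis = getXAxisTimestamps([seriesA, seriesB] as QueryData[]); // [100, 160, 220]

// Each series is re-sampled onto the shared x-axis; missing points become null.
fillMissingXAxisTimestamps(xAxis, [seriesA, seriesB]);
// => [
//      [1, 2, null], // series A
//      [5, null, 7], // series B
//    ]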
@@ -24,6 +24,14 @@
		opacity: 1;
	}

	.legend-empty-state {
		font-size: 12px;
		color: var(--bg-vanilla-400);
		text-align: center;
		padding: 2rem 0;
	}

	.legend-virtuoso-container {
		height: 100%;
		width: 100%;
@@ -81,6 +81,13 @@ export default function Legend({
|
||||
[focusedSeriesIndex, position],
|
||||
);
|
||||
|
||||
const isEmptyState = useMemo(() => {
|
||||
if (position !== LegendPosition.RIGHT || !legendSearchQuery.trim()) {
|
||||
return false;
|
||||
}
|
||||
return visibleLegendItems.length === 0;
|
||||
}, [position, legendSearchQuery, visibleLegendItems]);
|
||||
|
||||
return (
|
||||
<div
|
||||
ref={legendContainerRef}
|
||||
@@ -103,15 +110,21 @@ export default function Legend({
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
<VirtuosoGrid
|
||||
className={cx(
|
||||
'legend-virtuoso-container',
|
||||
`legend-virtuoso-container-${position.toLowerCase()}`,
|
||||
{ 'legend-virtuoso-container-single-row': isSingleRow },
|
||||
)}
|
||||
data={visibleLegendItems}
|
||||
itemContent={(_, item): JSX.Element => renderLegendItem(item)}
|
||||
/>
|
||||
{isEmptyState ? (
|
||||
<div className="legend-empty-state">
|
||||
No series found matching "{legendSearchQuery}"
|
||||
</div>
|
||||
) : (
|
||||
<VirtuosoGrid
|
||||
className={cx(
|
||||
'legend-virtuoso-container',
|
||||
`legend-virtuoso-container-${position.toLowerCase()}`,
|
||||
{ 'legend-virtuoso-container-single-row': isSingleRow },
|
||||
)}
|
||||
data={visibleLegendItems}
|
||||
itemContent={(_, item): JSX.Element => renderLegendItem(item)}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ import {
|
||||
} from 'container/DashboardContainer/DashboardVariablesSelection/util';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
|
||||
import { initializeVariableFetchStore } from '../variableFetchStore';
|
||||
import {
|
||||
IDashboardVariables,
|
||||
IDashboardVariablesStoreState,
|
||||
@@ -62,9 +63,30 @@ export function buildDependencyData(
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize the variable fetch store with the computed dependency data
|
||||
*/
|
||||
function initializeFetchStore(
|
||||
sortedVariablesArray: IDashboardVariable[],
|
||||
dependencyData: IDependencyData | null,
|
||||
): void {
|
||||
if (dependencyData) {
|
||||
const allVariableNames = sortedVariablesArray
|
||||
.map((v) => v.name)
|
||||
.filter((name): name is string => !!name);
|
||||
|
||||
initializeVariableFetchStore(
|
||||
allVariableNames,
|
||||
dependencyData.graph,
|
||||
dependencyData.parentDependencyGraph,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute derived values from variables
|
||||
* This is a composition of buildSortedVariablesArray and buildDependencyData
|
||||
* Also initializes the variable fetch store with the new dependency data
|
||||
*/
|
||||
export function computeDerivedValues(
|
||||
variables: IDashboardVariablesStoreState['variables'],
|
||||
@@ -75,15 +97,22 @@ export function computeDerivedValues(
|
||||
const sortedVariablesArray = buildSortedVariablesArray(variables);
|
||||
const dependencyData = buildDependencyData(sortedVariablesArray);
|
||||
|
||||
// Initialize the variable fetch store when dependency data is computed
|
||||
initializeFetchStore(sortedVariablesArray, dependencyData);
|
||||
|
||||
return { sortedVariablesArray, dependencyData };
|
||||
}
|
||||
|
||||
/**
|
||||
* Update derived values in the store state (for use with immer)
|
||||
* Also initializes the variable fetch store with the new dependency data
|
||||
*/
|
||||
export function updateDerivedValues(
|
||||
draft: IDashboardVariablesStoreState,
|
||||
): void {
|
||||
draft.sortedVariablesArray = buildSortedVariablesArray(draft.variables);
|
||||
draft.dependencyData = buildDependencyData(draft.sortedVariablesArray);
|
||||
|
||||
// Initialize the variable fetch store when dependency data is updated
|
||||
initializeFetchStore(draft.sortedVariablesArray, draft.dependencyData);
|
||||
}
|
||||
|
||||
frontend/src/providers/Dashboard/store/variableFetchStore.ts (new file, 57 lines)
@@ -0,0 +1,57 @@
import { VariableGraph } from 'container/DashboardContainer/DashboardVariablesSelection/util';

import createStore from './store';

// Fetch state for each variable
export type VariableFetchState =
	| 'idle' // stable state - initial or complete
	| 'loading' // actively fetching data (first time)
	| 'revalidating' // refetching existing data
	| 'waiting' // blocked on parent dependencies
	| 'error';

export interface IVariableFetchStoreState {
	// Per-variable fetch state
	states: Record<string, VariableFetchState>;

	// Dependency graphs (set once when variables change)
	dependencyGraph: VariableGraph; // variable -> children that depend on it
	parentGraph: VariableGraph; // variable -> parents it depends on

	// Track last update timestamp per variable to trigger re-fetches
	lastUpdated: Record<string, number>;
}

const initialState: IVariableFetchStoreState = {
	states: {},
	dependencyGraph: {},
	parentGraph: {},
	lastUpdated: {},
};

export const variableFetchStore = createStore<IVariableFetchStoreState>(
	initialState,
);

// ============== Actions ==============

/**
 * Initialize the store with dependency graphs and set initial states
 */
export function initializeVariableFetchStore(
	variableNames: string[],
	dependencyGraph: VariableGraph,
	parentGraph: VariableGraph,
): void {
	variableFetchStore.update((draft) => {
		draft.dependencyGraph = dependencyGraph;
		draft.parentGraph = parentGraph;

		// Initialize all variables to idle, preserving existing ready states
		variableNames.forEach((name) => {
			if (!draft.states[name]) {
				draft.states[name] = 'idle';
			}
		});
	});
}
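A small usage sketch (illustrative only; the dependency-graph shape assumes VariableGraph maps a variable name to an array of variable names) showing how the store is seeded and updated with the APIs defined above:

// Hypothetical dashboard with two variables where `service` feeds `endpoint`.
initializeVariableFetchStore(
	['service', 'endpoint'],
	{ service: ['endpoint'], endpoint: [] }, // children that depend on each variable
	{ service: [], endpoint: ['service'] }, // parents each variable depends on
);

// State transitions reuse the same immer-style update API; a real action
// would also move dependants to 'waiting' while a parent is loading.
variableFetchStore.update((draft) => {
	draft.states.service = 'loading';
	draft.lastUpdated.service = Date.now();
});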
@@ -325,7 +325,7 @@ func (provider *provider) addUserRoutes(router *mux.Router) error {
|
||||
Response: nil,
|
||||
ResponseContentType: "",
|
||||
SuccessStatusCode: http.StatusNoContent,
|
||||
ErrorStatusCodes: []int{http.StatusBadRequest},
|
||||
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusUnprocessableEntity},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: []handler.OpenAPISecurityScheme{},
|
||||
})).Methods(http.MethodPost).GetError(); err != nil {
|
||||
|
||||
@@ -12,30 +12,60 @@ import (
|
||||
)
|
||||
|
||||
const (
|
||||
derivedKeyHTTPURL = "http_url" // https://signoz.io/docs/traces-management/guides/derived-fields-spans/#http_url
|
||||
derivedKeyHTTPHost = "http_host"
|
||||
urlPathKeyLegacy = "http.url"
|
||||
serverAddressKeyLegacy = "net.peer.name"
|
||||
|
||||
urlPathKey = "url.full"
|
||||
serverAddressKey = "server.address"
|
||||
)
|
||||
|
||||
var defaultStepInterval = 60 * time.Second
|
||||
|
||||
var (
|
||||
groupByKeyHTTPHost = qbtypes.GroupByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: derivedKeyHTTPHost,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
type SemconvFieldMapping struct {
|
||||
LegacyField string
|
||||
CurrentField string
|
||||
FieldType telemetrytypes.FieldDataType
|
||||
Context telemetrytypes.FieldContext
|
||||
}
|
||||
|
||||
var dualSemconvGroupByKeys = map[string][]qbtypes.GroupByKey{
|
||||
"server": {
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: serverAddressKey,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
},
|
||||
},
|
||||
}
|
||||
groupByKeyHTTPURL = qbtypes.GroupByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: derivedKeyHTTPURL,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: serverAddressKeyLegacy,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
},
|
||||
},
|
||||
}
|
||||
)
|
||||
},
|
||||
"url": {
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: urlPathKey,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
},
|
||||
},
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: urlPathKeyLegacy,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
func FilterIntermediateColumns(result *qbtypes.QueryRangeResponse) *qbtypes.QueryRangeResponse {
|
||||
if result == nil || result.Data.Results == nil {
|
||||
@@ -84,6 +114,103 @@ func FilterIntermediateColumns(result *qbtypes.QueryRangeResponse) *qbtypes.Quer
|
||||
return result
|
||||
}
|
||||
|
||||
func MergeSemconvColumns(result *qbtypes.QueryRangeResponse) *qbtypes.QueryRangeResponse {
|
||||
if result == nil || result.Data.Results == nil {
|
||||
return result
|
||||
}
|
||||
|
||||
for _, res := range result.Data.Results {
|
||||
scalarData, ok := res.(*qbtypes.ScalarData)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
serverAddressKeyIdx := -1
|
||||
serverAddressKeyLegacyIdx := -1
|
||||
|
||||
for i, col := range scalarData.Columns {
|
||||
if col.Name == serverAddressKey {
|
||||
serverAddressKeyIdx = i
|
||||
} else if col.Name == serverAddressKeyLegacy {
|
||||
serverAddressKeyLegacyIdx = i
|
||||
}
|
||||
}
|
||||
|
||||
if serverAddressKeyIdx == -1 || serverAddressKeyLegacyIdx == -1 {
|
||||
continue
|
||||
}
|
||||
|
||||
var newRows [][]any
|
||||
for _, row := range scalarData.Data {
|
||||
if len(row) <= serverAddressKeyIdx || len(row) <= serverAddressKeyLegacyIdx {
|
||||
continue
|
||||
}
|
||||
|
||||
var serverName any
|
||||
if isValidValue(row[serverAddressKeyIdx]) {
|
||||
serverName = row[serverAddressKeyIdx]
|
||||
} else if isValidValue(row[serverAddressKeyLegacyIdx]) {
|
||||
serverName = row[serverAddressKeyLegacyIdx]
|
||||
}
|
||||
|
||||
if serverName != nil {
|
||||
newRow := make([]any, len(row)-1)
|
||||
newRow[0] = serverName
|
||||
|
||||
targetIdx := 1
|
||||
for i, val := range row {
|
||||
if i != serverAddressKeyLegacyIdx && i != serverAddressKeyIdx {
|
||||
if targetIdx < len(newRow) {
|
||||
newRow[targetIdx] = val
|
||||
targetIdx++
|
||||
}
|
||||
}
|
||||
}
|
||||
newRows = append(newRows, newRow)
|
||||
}
|
||||
}
|
||||
|
||||
newColumns := make([]*qbtypes.ColumnDescriptor, len(scalarData.Columns)-1)
|
||||
targetIdx := 0
|
||||
for i, col := range scalarData.Columns {
|
||||
if i == serverAddressKeyIdx {
|
||||
newCol := &qbtypes.ColumnDescriptor{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: serverAddressKeyLegacy,
|
||||
FieldDataType: col.FieldDataType,
|
||||
FieldContext: col.FieldContext,
|
||||
Signal: col.Signal,
|
||||
},
|
||||
QueryName: col.QueryName,
|
||||
AggregationIndex: col.AggregationIndex,
|
||||
Meta: col.Meta,
|
||||
Type: col.Type,
|
||||
}
|
||||
newColumns[targetIdx] = newCol
|
||||
targetIdx++
|
||||
} else if i != serverAddressKeyLegacyIdx {
|
||||
newColumns[targetIdx] = col
|
||||
targetIdx++
|
||||
}
|
||||
}
|
||||
|
||||
scalarData.Columns = newColumns
|
||||
scalarData.Data = newRows
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
func isValidValue(val any) bool {
|
||||
if val == nil {
|
||||
return false
|
||||
}
|
||||
if str, ok := val.(string); ok {
|
||||
return str != "" && str != "n/a"
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func FilterResponse(results []*qbtypes.QueryRangeResponse) []*qbtypes.QueryRangeResponse {
|
||||
filteredResults := make([]*qbtypes.QueryRangeResponse, 0, len(results))
|
||||
|
||||
@@ -134,7 +261,7 @@ func FilterResponse(results []*qbtypes.QueryRangeResponse) []*qbtypes.QueryRange
|
||||
|
||||
func shouldIncludeSeries(series *qbtypes.TimeSeries) bool {
|
||||
for _, label := range series.Labels {
|
||||
if label.Key.Name == derivedKeyHTTPHost {
|
||||
if label.Key.Name == serverAddressKeyLegacy || label.Key.Name == serverAddressKey {
|
||||
if strVal, ok := label.Value.(string); ok {
|
||||
if net.ParseIP(strVal) != nil {
|
||||
return false
|
||||
@@ -147,10 +274,12 @@ func shouldIncludeSeries(series *qbtypes.TimeSeries) bool {
|
||||
|
||||
func shouldIncludeRow(row *qbtypes.RawRow) bool {
|
||||
if row.Data != nil {
|
||||
if domainVal, ok := row.Data[derivedKeyHTTPHost]; ok {
|
||||
if domainStr, ok := domainVal.(string); ok {
|
||||
if net.ParseIP(domainStr) != nil {
|
||||
return false
|
||||
for _, key := range []string{serverAddressKeyLegacy, serverAddressKey} {
|
||||
if domainVal, ok := row.Data[key]; ok {
|
||||
if domainStr, ok := domainVal.(string); ok {
|
||||
if net.ParseIP(domainStr) != nil {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -158,8 +287,8 @@ func shouldIncludeRow(row *qbtypes.RawRow) bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func mergeGroupBy(base qbtypes.GroupByKey, additional []qbtypes.GroupByKey) []qbtypes.GroupByKey {
|
||||
return append([]qbtypes.GroupByKey{base}, additional...)
|
||||
func mergeGroupBy(base, additional []qbtypes.GroupByKey) []qbtypes.GroupByKey {
|
||||
return append(base, additional...)
|
||||
}
|
||||
|
||||
func BuildDomainList(req *thirdpartyapitypes.ThirdPartyApiRequest) (*qbtypes.QueryRangeRequest, error) {
|
||||
@@ -225,10 +354,10 @@ func buildEndpointsQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.Q
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
|
||||
Aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "count_distinct(http_url)"},
|
||||
{Expression: "count_distinct(http.url)"},
|
||||
},
|
||||
Filter: buildBaseFilter(req.Filter),
|
||||
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
|
||||
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -244,7 +373,7 @@ func buildLastSeenQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.Qu
|
||||
{Expression: "max(timestamp)"},
|
||||
},
|
||||
Filter: buildBaseFilter(req.Filter),
|
||||
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
|
||||
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -260,7 +389,7 @@ func buildRpsQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEn
|
||||
{Expression: "rate()"},
|
||||
},
|
||||
Filter: buildBaseFilter(req.Filter),
|
||||
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
|
||||
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -278,7 +407,7 @@ func buildErrorQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.Query
|
||||
{Expression: "count()"},
|
||||
},
|
||||
Filter: filter,
|
||||
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
|
||||
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -294,7 +423,7 @@ func buildTotalSpanQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.Q
|
||||
{Expression: "count()"},
|
||||
},
|
||||
Filter: buildBaseFilter(req.Filter),
|
||||
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
|
||||
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -310,7 +439,7 @@ func buildP99Query(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEn
|
||||
{Expression: "p99(duration_nano)"},
|
||||
},
|
||||
Filter: buildBaseFilter(req.Filter),
|
||||
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
|
||||
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -333,10 +462,10 @@ func buildEndpointsInfoQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtyp
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
|
||||
Aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: fmt.Sprintf("rate(%s)", derivedKeyHTTPURL)},
|
||||
{Expression: "rate(http.url)"},
|
||||
},
|
||||
Filter: buildBaseFilter(req.Filter),
|
||||
GroupBy: mergeGroupBy(groupByKeyHTTPURL, req.GroupBy),
|
||||
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["url"], req.GroupBy),
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -390,7 +519,8 @@ func buildLastSeenInfoQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtype
|
||||
}
|
||||
|
||||
func buildBaseFilter(additionalFilter *qbtypes.Filter) *qbtypes.Filter {
|
||||
baseExpression := fmt.Sprintf("%s EXISTS AND kind_string = 'Client'", derivedKeyHTTPURL)
|
||||
baseExpression := fmt.Sprintf("(%s EXISTS OR %s EXISTS) AND kind_string = 'Client'",
|
||||
urlPathKeyLegacy, urlPathKey)
|
||||
|
||||
if additionalFilter != nil && additionalFilter.Expression != "" {
|
||||
// even if it contains kind_string we add with an AND so it doesn't matter if the user is overriding it.
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
package thirdpartyapi
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/types/thirdpartyapitypes"
|
||||
"testing"
|
||||
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
@@ -29,7 +28,7 @@ func TestFilterResponse(t *testing.T) {
|
||||
{
|
||||
Labels: []*qbtypes.Label{
|
||||
{
|
||||
Key: telemetrytypes.TelemetryFieldKey{Name: derivedKeyHTTPHost},
|
||||
Key: telemetrytypes.TelemetryFieldKey{Name: "net.peer.name"},
|
||||
Value: "192.168.1.1",
|
||||
},
|
||||
},
|
||||
@@ -37,7 +36,7 @@ func TestFilterResponse(t *testing.T) {
|
||||
{
|
||||
Labels: []*qbtypes.Label{
|
||||
{
|
||||
Key: telemetrytypes.TelemetryFieldKey{Name: derivedKeyHTTPHost},
|
||||
Key: telemetrytypes.TelemetryFieldKey{Name: "net.peer.name"},
|
||||
Value: "example.com",
|
||||
},
|
||||
},
|
||||
@@ -61,7 +60,7 @@ func TestFilterResponse(t *testing.T) {
|
||||
{
|
||||
Labels: []*qbtypes.Label{
|
||||
{
|
||||
Key: telemetrytypes.TelemetryFieldKey{Name: derivedKeyHTTPHost},
|
||||
Key: telemetrytypes.TelemetryFieldKey{Name: "net.peer.name"},
|
||||
Value: "example.com",
|
||||
},
|
||||
},
|
||||
@@ -85,12 +84,12 @@ func TestFilterResponse(t *testing.T) {
|
||||
Rows: []*qbtypes.RawRow{
|
||||
{
|
||||
Data: map[string]any{
|
||||
derivedKeyHTTPHost: "192.168.1.1",
|
||||
"net.peer.name": "192.168.1.1",
|
||||
},
|
||||
},
|
||||
{
|
||||
Data: map[string]any{
|
||||
derivedKeyHTTPHost: "example.com",
|
||||
"net.peer.name": "example.com",
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -107,7 +106,7 @@ func TestFilterResponse(t *testing.T) {
|
||||
Rows: []*qbtypes.RawRow{
|
||||
{
|
||||
Data: map[string]any{
|
||||
derivedKeyHTTPHost: "example.com",
|
||||
"net.peer.name": "example.com",
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
@@ -369,7 +369,7 @@ func (module *Module) GetOrCreateResetPasswordToken(ctx context.Context, userID
|
||||
|
||||
func (module *Module) ForgotPassword(ctx context.Context, orgID valuer.UUID, email valuer.Email, frontendBaseURL string) error {
|
||||
if !module.config.Password.Reset.AllowSelf {
|
||||
return errors.New(errors.TypeUnsupported, errors.CodeUnsupported, "users are not allowed to reset their password themselves, please contact an admin to reset your password")
|
||||
return errors.New(errors.TypeUnsupported, errors.CodeUnsupported, "Users are not allowed to reset their password themselves, please contact an admin to reset your password.")
|
||||
}
|
||||
|
||||
user, err := module.store.GetUserByEmailAndOrgID(ctx, email, orgID)
|
||||
|
||||
@@ -5008,6 +5008,7 @@ func (aH *APIHandler) getDomainList(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
result = thirdpartyapi.MergeSemconvColumns(result)
|
||||
result = thirdpartyapi.FilterIntermediateColumns(result)
|
||||
|
||||
// Filter IP addresses if ShowIp is false
|
||||
@@ -5064,6 +5065,7 @@ func (aH *APIHandler) getDomainInfo(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
result = thirdpartyapi.MergeSemconvColumns(result)
|
||||
result = thirdpartyapi.FilterIntermediateColumns(result)
|
||||
|
||||
// Filter IP addresses if ShowIp is false
|
||||
|
||||
@@ -71,7 +71,18 @@ func Parse(filters *v3.FilterSet) (string, error) {
|
||||
// accustom log filters like `body.log.message EXISTS` into EXPR language
|
||||
// where User is attempting to check for keys present in JSON log body
|
||||
if strings.HasPrefix(v.Key.Key, "body.") {
|
||||
filter = fmt.Sprintf("%s %s %s", exprFormattedValue(strings.TrimPrefix(v.Key.Key, "body.")), logOperatorsToExpr[v.Operator], "fromJSON(body)")
|
||||
// if body is a string and is a valid JSON, then check if the key exists in the JSON
|
||||
filter = fmt.Sprintf(`((type(body) == "string" && isJSON(body)) && %s %s %s)`, exprFormattedValue(strings.TrimPrefix(v.Key.Key, "body.")), logOperatorsToExpr[v.Operator], "fromJSON(body)")
|
||||
|
||||
// if body is a map, then check if the key exists in the map
|
||||
operator := v3.FilterOperatorNotEqual
|
||||
if v.Operator == v3.FilterOperatorNotExists {
|
||||
operator = v3.FilterOperatorEqual
|
||||
}
|
||||
nilCheckFilter := fmt.Sprintf("%s %s nil", v.Key.Key, logOperatorsToExpr[operator])
|
||||
|
||||
// join the two filters with OR
|
||||
filter = fmt.Sprintf(`(%s or (type(body) == "map" && (%s)))`, filter, nilCheckFilter)
|
||||
} else if typ := getTypeName(v.Key.Type); typ != "" {
|
||||
filter = fmt.Sprintf("%s %s %s", exprFormattedValue(v.Key.Key), logOperatorsToExpr[v.Operator], typ)
|
||||
} else {
|
||||
|
||||
@@ -3,203 +3,538 @@ package queryBuilderToExpr
|
||||
import (
|
||||
"testing"
|
||||
|
||||
signozstanzahelper "github.com/SigNoz/signoz-otel-collector/processor/signozlogspipelineprocessor/stanza/operator/helper"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
"github.com/expr-lang/expr/vm"
|
||||
"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/entry"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
var testCases = []struct {
|
||||
Name string
|
||||
Query *v3.FilterSet
|
||||
Expr string
|
||||
ExpectError bool
|
||||
}{
|
||||
{
|
||||
Name: "equal",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "="},
|
||||
}},
|
||||
Expr: `attributes["key"] == "checkbody"`,
|
||||
},
|
||||
{
|
||||
Name: "not equal",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "!="},
|
||||
}},
|
||||
Expr: `attributes["key"] != "checkbody"`,
|
||||
},
|
||||
{
|
||||
Name: "less than",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] < 10`,
|
||||
},
|
||||
{
|
||||
Name: "greater than",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] > 10`,
|
||||
},
|
||||
{
|
||||
Name: "less than equal",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] <= 10`,
|
||||
},
|
||||
{
|
||||
Name: "greater than equal",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">="},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] >= 10`,
|
||||
},
|
||||
// case sensitive
|
||||
{
|
||||
Name: "body contains",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
|
||||
}},
|
||||
Expr: `body != nil && lower(body) contains lower("checkbody")`,
|
||||
},
|
||||
{
|
||||
Name: "body.log.message exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
}},
|
||||
Expr: `"log.message" in fromJSON(body)`,
|
||||
},
|
||||
{
|
||||
Name: "body.log.message not exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
}},
|
||||
Expr: `"log.message" not in fromJSON(body)`,
|
||||
},
|
||||
{
|
||||
Name: "body ncontains",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "ncontains"},
|
||||
}},
|
||||
Expr: `body != nil && lower(body) not contains lower("checkbody")`,
|
||||
},
|
||||
{
|
||||
Name: "body regex",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "regex"},
|
||||
}},
|
||||
Expr: `body != nil && body matches "[0-1]+regex$"`,
|
||||
},
|
||||
{
|
||||
Name: "body not regex",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
|
||||
}},
|
||||
Expr: `body != nil && body not matches "[0-1]+regex$"`,
|
||||
},
|
||||
{
|
||||
Name: "regex with escape characters",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: `^Executing \[\S+@\S+:[0-9]+\] \S+".*`, Operator: "regex"},
|
||||
}},
|
||||
Expr: `body != nil && body matches "^Executing \\[\\S+@\\S+:[0-9]+\\] \\S+\".*"`,
|
||||
},
|
||||
{
|
||||
Name: "invalid regex",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
|
||||
}},
|
||||
Expr: `body != nil && lower(body) not matches "[0-9]++"`,
|
||||
ExpectError: true,
|
||||
},
|
||||
{
|
||||
Name: "in",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []interface{}{1, 2, 3, 4}, Operator: "in"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] in [1,2,3,4]`,
|
||||
},
|
||||
{
|
||||
Name: "not in",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []interface{}{"1", "2"}, Operator: "nin"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] not in ['1','2']`,
|
||||
},
|
||||
{
|
||||
Name: "exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "exists"},
|
||||
}},
|
||||
Expr: `"key" in attributes`,
|
||||
},
|
||||
{
|
||||
Name: "not exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
|
||||
}},
|
||||
Expr: `"key" not in attributes`,
|
||||
},
|
||||
{
|
||||
Name: "trace_id not exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
}},
|
||||
Expr: `trace_id == nil`,
|
||||
},
|
||||
{
|
||||
Name: "trace_id exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
}},
|
||||
Expr: `trace_id != nil`,
|
||||
},
|
||||
{
|
||||
Name: "span_id not exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
}},
|
||||
Expr: `span_id == nil`,
|
||||
},
|
||||
{
|
||||
Name: "span_id exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
}},
|
||||
Expr: `span_id != nil`,
|
||||
},
|
||||
{
|
||||
Name: "Multi filter",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] <= 10 and body != nil && body not matches "[0-1]+regex$" and "key" not in attributes`,
|
||||
},
|
||||
{
|
||||
Name: "incorrect multi filter",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] <= 10 and body not matches "[0-9]++" and "key" not in attributes`,
|
||||
ExpectError: true,
|
||||
},
|
||||
}
|
||||
func TestParseExpression(t *testing.T) {
|
||||
var testCases = []struct {
|
||||
Name string
|
||||
Query *v3.FilterSet
|
||||
Expr string
|
||||
ExpectError bool
|
||||
}{
|
||||
{
|
||||
Name: "equal",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "="},
|
||||
}},
|
||||
Expr: `attributes["key"] == "checkbody"`,
|
||||
},
|
||||
{
|
||||
Name: "not equal",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "!="},
|
||||
}},
|
||||
Expr: `attributes["key"] != "checkbody"`,
|
||||
},
|
||||
{
|
||||
Name: "less than",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] < 10`,
|
||||
},
|
||||
{
|
||||
Name: "greater than",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] > 10`,
|
||||
},
|
||||
{
|
||||
Name: "less than equal",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] <= 10`,
|
||||
},
|
||||
{
|
||||
Name: "greater than equal",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">="},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] >= 10`,
|
||||
},
|
||||
// case sensitive
|
||||
{
|
||||
Name: "body contains",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
|
||||
}},
|
||||
Expr: `body != nil && lower(body) contains lower("checkbody")`,
|
||||
},
|
||||
{
|
||||
Name: "body.log.message exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
}},
|
||||
Expr: `(((type(body) == "string" && isJSON(body)) && "log.message" in fromJSON(body)) or (type(body) == "map" && (body.log.message != nil)))`,
|
||||
},
|
||||
{
|
||||
Name: "body.log.message not exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
}},
|
||||
Expr: `(((type(body) == "string" && isJSON(body)) && "log.message" not in fromJSON(body)) or (type(body) == "map" && (body.log.message == nil)))`,
|
||||
},
|
||||
{
|
||||
Name: "body ncontains",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "ncontains"},
|
||||
}},
|
||||
Expr: `body != nil && lower(body) not contains lower("checkbody")`,
|
||||
},
|
||||
{
|
||||
Name: "body regex",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "regex"},
|
||||
}},
|
||||
Expr: `body != nil && body matches "[0-1]+regex$"`,
|
||||
},
|
||||
{
|
||||
Name: "body not regex",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
|
||||
}},
|
||||
Expr: `body != nil && body not matches "[0-1]+regex$"`,
|
||||
},
|
||||
{
|
||||
Name: "regex with escape characters",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: `^Executing \[\S+@\S+:[0-9]+\] \S+".*`, Operator: "regex"},
|
||||
}},
|
||||
Expr: `body != nil && body matches "^Executing \\[\\S+@\\S+:[0-9]+\\] \\S+\".*"`,
|
||||
},
|
||||
{
|
||||
Name: "invalid regex",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
|
||||
}},
|
||||
Expr: `body != nil && lower(body) not matches "[0-9]++"`,
|
||||
ExpectError: true,
|
||||
},
|
||||
{
|
||||
Name: "in",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{1, 2, 3, 4}, Operator: "in"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] in [1,2,3,4]`,
|
||||
},
|
||||
{
|
||||
Name: "not in",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{"1", "2"}, Operator: "nin"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] not in ['1','2']`,
|
||||
},
|
||||
{
|
||||
Name: "exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "exists"},
|
||||
}},
|
||||
Expr: `"key" in attributes`,
|
||||
},
|
||||
{
|
||||
Name: "not exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
|
||||
}},
|
||||
Expr: `"key" not in attributes`,
|
||||
},
|
||||
{
|
||||
Name: "trace_id not exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
}},
|
||||
Expr: `trace_id == nil`,
|
||||
},
|
||||
{
|
||||
Name: "trace_id exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
}},
|
||||
Expr: `trace_id != nil`,
|
||||
},
|
||||
{
|
||||
Name: "span_id not exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
}},
|
||||
Expr: `span_id == nil`,
|
||||
},
|
||||
{
|
||||
Name: "span_id exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
}},
|
||||
Expr: `span_id != nil`,
|
||||
},
|
||||
{
|
||||
Name: "Multi filter",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] <= 10 and body != nil && body not matches "[0-1]+regex$" and "key" not in attributes`,
|
||||
},
|
||||
{
|
||||
Name: "incorrect multi filter",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] <= 10 and body not matches "[0-9]++" and "key" not in attributes`,
|
||||
ExpectError: true,
|
||||
},
|
||||
}
|
||||
|
||||
func TestParse(t *testing.T) {
|
||||
for _, tt := range testCases {
|
||||
Convey(tt.Name, t, func() {
|
||||
t.Run(tt.Name, func(t *testing.T) {
|
||||
x, err := Parse(tt.Query)
|
||||
if tt.ExpectError {
|
||||
So(err, ShouldNotBeNil)
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
So(err, ShouldBeNil)
|
||||
So(x, ShouldEqual, tt.Expr)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, tt.Expr, x)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
type EntryComposite struct {
	ID int
	*entry.Entry
}

// makeEntry creates an EntryComposite for tests. Pass nil for traceID/spanID to mean "not set".
func makeEntry(id int, body any, attributes, resource map[string]any, traceID, spanID []byte) EntryComposite {
	e := entry.New()
	e.Body = body
	if attributes != nil {
		e.Attributes = attributes
	} else {
		e.Attributes = make(map[string]any)
	}
	if resource != nil {
		e.Resource = resource
	} else {
		e.Resource = make(map[string]any)
	}
	if traceID != nil {
		e.TraceID = traceID
	}
	if spanID != nil {
		e.SpanID = spanID
	}
	return EntryComposite{ID: id, Entry: e}
}

func TestExpressionVSEntry(t *testing.T) {
|
||||
// Dataset: entries with varied body (JSON, plain text, and map), attributes, trace_id, span_id for filter testing.
// IDs 0..12: JSON bodies (body.msg / body.log etc. work). IDs 13..17: plain text log bodies. IDs 18..20: map bodies.
|
||||
dataset := []EntryComposite{
|
||||
// JSON body entries (0-12)
|
||||
makeEntry(0, `{"msg":"hello world"}`, map[string]any{"level": "info"}, map[string]any{"env": "prod", "host": "node-0"}, nil, nil),
|
||||
makeEntry(1, `{"msg":"error occurred", "missing": "value"}`, map[string]any{"level": "error"}, map[string]any{"env": "prod", "host": "node-1"}, []byte("trace1"), []byte("span1")),
|
||||
makeEntry(2, `{"msg":"checkbody substring"}`, map[string]any{"level": "info"}, map[string]any{"env": "staging", "host": "node-2"}, []byte("trace2"), nil),
|
||||
makeEntry(3, `{"msg":"no match here"}`, map[string]any{"level": "debug"}, map[string]any{"env": "staging", "host": "node-3"}, nil, []byte("span3")),
|
||||
makeEntry(4, `{"msg":"101regex suffix"}`, map[string]any{"code": "200", "count": int64(5)}, map[string]any{"env": "prod", "host": "node-4"}, nil, nil),
|
||||
makeEntry(5, `{"msg":"plain text only"}`, map[string]any{"code": "404", "count": int64(10)}, map[string]any{"env": "prod", "host": "node-5"}, []byte("trace5"), []byte("span5")),
|
||||
makeEntry(6, `{"log":{"message":"user login"}}`, map[string]any{"service": "auth"}, map[string]any{"env": "dev", "host": "node-6"}, nil, nil),
|
||||
makeEntry(7, `{"log":{"message":"user logout"}}`, map[string]any{"service": "auth", "user_id": "u1"}, map[string]any{"env": "dev", "host": "node-7"}, []byte("trace7"), nil),
|
||||
makeEntry(8, `{"event":"click"}`, map[string]any{"service": "api"}, map[string]any{"env": "dev", "host": "node-8"}, nil, nil),
|
||||
makeEntry(9, `{"msg":"checkbody"}`, map[string]any{"tag": "exact", "num": int64(9)}, map[string]any{"env": "prod", "host": "node-9"}, nil, nil),
|
||||
makeEntry(10, `{"msg":"CHECKBODY case"}`, map[string]any{"tag": "case", "num": int64(10)}, map[string]any{"env": "staging", "host": "node-10"}, nil, nil),
|
||||
makeEntry(11, `{"msg":"foo"}`, map[string]any{"status": "active", "score": int64(100)}, map[string]any{"env": "prod", "host": "node-11"}, nil, nil),
|
||||
makeEntry(12, `{"msg":"bar"}`, map[string]any{"status": "inactive", "score": int64(50)}, map[string]any{"env": "staging", "host": "node-12"}, []byte("trace12"), []byte("span12")),
|
||||
// Plain text log body entries (13-17)
|
||||
makeEntry(13, "Server started on port 8080", map[string]any{"component": "server"}, map[string]any{"env": "prod", "host": "node-13"}, nil, nil),
|
||||
makeEntry(14, "Connection refused to 10.0.0.1:5432", map[string]any{"level": "error"}, map[string]any{"env": "prod", "host": "node-14"}, nil, nil),
|
||||
makeEntry(15, "User login failed for admin", map[string]any{"service": "auth", "level": "warn"}, map[string]any{"env": "dev", "host": "node-15"}, []byte("trace15"), nil),
|
||||
makeEntry(16, "checkbody in text log", map[string]any{"level": "info"}, map[string]any{"env": "staging", "host": "node-16"}, nil, nil),
|
||||
makeEntry(17, "WARN: disk full on /var", map[string]any{"level": "warn"}, map[string]any{"env": "prod", "host": "node-17"}, nil, []byte("span17")),
|
||||
// Body as map (not string) entries (18-20)
|
||||
makeEntry(18, map[string]any{"msg": "checkbody substring", "level": "info"}, map[string]any{"source": "map"}, map[string]any{"env": "prod", "host": "node-18"}, nil, nil),
|
||||
makeEntry(19, map[string]any{"log": map[string]any{"message": "nested value in map body"}, "missing": true}, map[string]any{"source": "map"}, map[string]any{"env": "staging", "host": "node-19"}, []byte("trace19"), nil),
|
||||
makeEntry(20, map[string]any{"event": "deploy", "version": "1.2.0"}, map[string]any{"source": "map", "level": "info"}, map[string]any{"env": "dev", "host": "node-20"}, nil, []byte("span20")),
|
||||
}
|
||||
|
||||
var testCases = []struct {
|
||||
Name string
|
||||
Query *v3.FilterSet
|
||||
ExpectedMatches []int
|
||||
}{
|
||||
{
|
||||
Name: "resource equal (env)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
|
||||
}},
|
||||
ExpectedMatches: []int{0, 1, 4, 5, 9, 11, 13, 14, 17, 18},
|
||||
},
|
||||
{
|
||||
Name: "resource not equal (env)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "!="},
|
||||
}},
|
||||
ExpectedMatches: []int{2, 3, 6, 7, 8, 10, 12, 15, 16, 19, 20},
|
||||
},
|
||||
{
|
||||
Name: "attribute less than (numeric)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "count", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 8, Operator: "<"},
|
||||
}},
|
||||
ExpectedMatches: []int{4},
|
||||
},
|
||||
{
|
||||
Name: "attribute greater than (numeric)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "count", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 8, Operator: ">"},
|
||||
}},
|
||||
ExpectedMatches: []int{5},
|
||||
},
|
||||
{
|
||||
Name: "body contains (case insensitive)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
|
||||
}},
|
||||
ExpectedMatches: []int{2, 9, 10, 16},
|
||||
},
|
||||
{
|
||||
Name: "body ncontains",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "ncontains"},
|
||||
}},
|
||||
ExpectedMatches: []int{0, 1, 3, 4, 5, 6, 7, 8, 11, 12, 13, 14, 15, 17},
|
||||
},
|
||||
{
|
||||
Name: "body.msg (case insensitive)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body.msg", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: false}, Value: "checkbody", Operator: "contains"},
|
||||
}},
|
||||
ExpectedMatches: []int{2, 9, 10, 18},
|
||||
},
|
||||
{
|
||||
Name: "body regex",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex", Operator: "regex"},
|
||||
}},
|
||||
ExpectedMatches: []int{4},
|
||||
},
|
||||
{
|
||||
Name: "body not regex",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex", Operator: "nregex"},
|
||||
}},
|
||||
ExpectedMatches: []int{0, 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17},
|
||||
},
|
||||
// body.log.message exists/nexists: expr checks "log.message" in fromJSON(body); nested key
|
||||
// semantics depend on signoz stanza helper. Omitted here to avoid coupling to env shape.
|
||||
{
|
||||
Name: "body top-level key exists (body.msg)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body.msg", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
}},
|
||||
ExpectedMatches: []int{0, 1, 2, 3, 4, 5, 9, 10, 11, 12, 18},
|
||||
},
|
||||
{
|
||||
Name: "body top-level key not exists (body.missing)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body.missing", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
}},
|
||||
ExpectedMatches: []int{0, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 18, 20},
|
||||
},
|
||||
{
|
||||
Name: "attribute exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "exists"},
|
||||
}},
|
||||
ExpectedMatches: []int{6, 7, 8, 15},
|
||||
},
|
||||
{
|
||||
Name: "attribute not exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
|
||||
}},
|
||||
ExpectedMatches: []int{0, 1, 2, 3, 4, 5, 9, 10, 11, 12, 13, 14, 16, 17, 18, 19, 20},
|
||||
},
|
||||
{
|
||||
Name: "trace_id exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
}},
|
||||
ExpectedMatches: []int{1, 2, 5, 7, 12, 15, 19},
|
||||
},
|
||||
{
|
||||
Name: "trace_id not exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
}},
|
||||
ExpectedMatches: []int{0, 3, 4, 6, 8, 9, 10, 11, 13, 14, 16, 17, 18, 20},
|
||||
},
|
||||
{
|
||||
Name: "span_id exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
}},
|
||||
ExpectedMatches: []int{1, 3, 5, 12, 17, 20},
|
||||
},
|
||||
{
|
||||
Name: "span_id not exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
}},
|
||||
ExpectedMatches: []int{0, 2, 4, 6, 7, 8, 9, 10, 11, 13, 14, 15, 16, 18, 19},
|
||||
},
|
||||
{
|
||||
Name: "in (attribute in list)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{"info", "error"}, Operator: "in"},
|
||||
}},
|
||||
ExpectedMatches: []int{0, 1, 2, 14, 16, 20},
|
||||
},
|
||||
{
|
||||
Name: "not in (attribute not in list)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{"error", "warn"}, Operator: "nin"},
|
||||
}},
|
||||
ExpectedMatches: []int{0, 2, 3, 16, 20},
|
||||
},
|
||||
{
|
||||
Name: "multi filter AND",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "staging", Operator: "="},
|
||||
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "info", Operator: "="},
|
||||
}},
|
||||
ExpectedMatches: []int{2, 16},
|
||||
},
|
||||
{
|
||||
Name: "multi filter AND (two attributes)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "auth", Operator: "="},
|
||||
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
|
||||
}},
|
||||
ExpectedMatches: []int{6, 7},
|
||||
},
|
||||
// Multi-filter variations: body + attribute, three conditions, trace/span + attribute
|
||||
{
|
||||
Name: "multi filter AND body contains + attribute",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "Connection", Operator: "contains"},
|
||||
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
|
||||
}},
|
||||
ExpectedMatches: []int{14},
|
||||
},
|
||||
{
|
||||
Name: "multi filter AND body contains + service",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "login", Operator: "contains"},
|
||||
{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "auth", Operator: "="},
|
||||
}},
|
||||
ExpectedMatches: []int{6, 15},
|
||||
},
|
||||
{
|
||||
Name: "multi filter AND env + level (prod error)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
|
||||
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "error", Operator: "="},
|
||||
}},
|
||||
ExpectedMatches: []int{1, 14},
|
||||
},
|
||||
{
|
||||
Name: "multi filter AND three conditions (staging + checkbody + info)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "staging", Operator: "="},
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
|
||||
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "info", Operator: "="},
|
||||
}},
|
||||
ExpectedMatches: []int{2, 16},
|
||||
},
|
||||
{
|
||||
Name: "multi filter AND trace_id exists + body contains",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
|
||||
}},
|
||||
ExpectedMatches: []int{2},
|
||||
},
|
||||
{
|
||||
Name: "multi filter AND span_id nexists + service auth",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "auth", Operator: "="},
|
||||
}},
|
||||
ExpectedMatches: []int{6, 7, 15},
|
||||
},
|
||||
{
|
||||
Name: "multi filter AND body regex + attribute",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex", Operator: "regex"},
|
||||
{Key: v3.AttributeKey{Key: "code", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "200", Operator: "="},
|
||||
}},
|
||||
ExpectedMatches: []int{4},
|
||||
},
|
||||
{
|
||||
Name: "multi filter AND no trace_id + no span_id + env prod",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
|
||||
}},
|
||||
ExpectedMatches: []int{0, 4, 9, 11, 13, 14, 18},
|
||||
},
|
||||
{
|
||||
Name: "multi filter AND level warn + body contains",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "warn", Operator: "="},
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "disk", Operator: "contains"},
|
||||
}},
|
||||
ExpectedMatches: []int{17},
|
||||
},
|
||||
{
|
||||
Name: "no matches (attribute value not present)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "never", Operator: "="},
|
||||
}},
|
||||
ExpectedMatches: []int{},
|
||||
},
|
||||
{
|
||||
Name: "attribute equal and trace_id exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "code", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "404", Operator: "="},
|
||||
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
}},
|
||||
ExpectedMatches: []int{5},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range testCases {
|
||||
t.Run(tt.Name, func(t *testing.T) {
|
||||
expression, err := Parse(tt.Query)
|
||||
assert.NoError(t, err)
|
||||
|
||||
compiled, hasBodyFieldRef, err := signozstanzahelper.ExprCompileBool(expression)
|
||||
assert.NoError(t, err)
|
||||
|
||||
matchedIDs := []int{}
|
||||
for _, d := range dataset {
|
||||
env := signozstanzahelper.GetExprEnv(d.Entry, hasBodyFieldRef)
|
||||
matches, err := vm.Run(compiled, env)
|
||||
signozstanzahelper.PutExprEnv(env)
|
||||
if err != nil {
|
||||
// Eval error (e.g. fromJSON on non-JSON body) => treat as no match
|
||||
continue
|
||||
}
|
||||
if matches != nil && matches.(bool) {
|
||||
matchedIDs = append(matchedIDs, d.ID)
|
||||
}
|
||||
}
|
||||
assert.Equal(t, tt.ExpectedMatches, matchedIDs, "query %q", tt.Name)
|
||||
})
|
||||
}
|
||||
}
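A minimal sketch (not part of this diff) of how the same pieces the test above exercises could be composed outside a test. It assumes the packages the test file already uses (v3, entry, signozstanzahelper, and the expr vm); the helper name evaluateFilter is hypothetical.

// evaluateFilter parses a v3.FilterSet into an expr string, compiles it once with
// ExprCompileBool, and runs the compiled program against each entry's expr environment,
// mirroring the loop in the test above. Evaluation errors (e.g. fromJSON on a non-JSON
// body) are treated as "no match".
func evaluateFilter(filter *v3.FilterSet, entries []*entry.Entry) ([]*entry.Entry, error) {
	expression, err := Parse(filter)
	if err != nil {
		return nil, err
	}

	compiled, hasBodyFieldRef, err := signozstanzahelper.ExprCompileBool(expression)
	if err != nil {
		return nil, err
	}

	matched := make([]*entry.Entry, 0, len(entries))
	for _, e := range entries {
		env := signozstanzahelper.GetExprEnv(e, hasBodyFieldRef)
		out, runErr := vm.Run(compiled, env)
		signozstanzahelper.PutExprEnv(env)
		if runErr != nil {
			continue
		}
		if ok, isBool := out.(bool); isBool && ok {
			matched = append(matched, e)
		}
	}
	return matched, nil
}
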
@@ -2,6 +2,7 @@ package signoz

import (
	"context"
	"net/http"
	"os"
	"reflect"

@@ -23,6 +24,8 @@ import (
	"github.com/SigNoz/signoz/pkg/modules/session"
	"github.com/SigNoz/signoz/pkg/modules/user"
	"github.com/SigNoz/signoz/pkg/types/ctxtypes"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/gorilla/mux"
	"github.com/swaggest/jsonschema-go"
	"github.com/swaggest/openapi-go"
	"github.com/swaggest/openapi-go/openapi3"

@@ -57,6 +60,10 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
		return nil, err
	}

	// Register routes that live outside the APIServer modules
	// so they are discovered by the OpenAPI walker.
	registerQueryRoutes(apiserver.Router())

	reflector := openapi3.NewReflector()
	reflector.JSONSchemaReflector().DefaultOptions = append(reflector.JSONSchemaReflector().DefaultOptions, jsonschema.InterceptDefName(func(t reflect.Type, defaultDefName string) string {
		if defaultDefName == "RenderSuccessResponse" {

@@ -97,3 +104,25 @@ func (openapi *OpenAPI) CreateAndWrite(path string) error {

	return os.WriteFile(path, spec, 0o600)
}

func registerQueryRoutes(router *mux.Router) {
	router.Handle("/api/v5/query_range", handler.New(
		func(http.ResponseWriter, *http.Request) {},
		handler.OpenAPIDef{
			ID: "QueryRangeV5",
			Tags: []string{"query"},
			Summary: "Query range",
			Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.",
			Request: new(qbtypes.QueryRangeRequest),
			RequestContentType: "application/json",
			Response: new(qbtypes.QueryRangeResponse),
			ResponseContentType: "application/json",
			SuccessStatusCode: http.StatusOK,
			ErrorStatusCodes: []int{http.StatusBadRequest},
			SecuritySchemes: []handler.OpenAPISecurityScheme{
				{Name: ctxtypes.AuthTypeAPIKey.StringValue(), Scopes: []string{"VIEWER"}},
				{Name: ctxtypes.AuthTypeTokenizer.StringValue(), Scopes: []string{"VIEWER"}},
			},
		},
	)).Methods(http.MethodPost)
}

@@ -36,8 +36,8 @@ type ChangePasswordRequest struct {
}

type PostableForgotPassword struct {
	OrgID valuer.UUID `json:"orgId"`
	Email valuer.Email `json:"email"`
	OrgID valuer.UUID `json:"orgId" required:"true"`
	Email valuer.Email `json:"email" required:"true"`
	FrontendBaseURL string `json:"frontendBaseURL"`
}

@@ -33,8 +33,8 @@ type LimitConfig struct {
}

type LimitValue struct {
	Size int64 `json:"size,omitempty"`
	Count int64 `json:"count,omitempty"`
	Size int64 `json:"size"`
	Count int64 `json:"count"`
}

type LimitMetric struct {
|
||||
|
||||
@@ -31,6 +31,13 @@ var (
|
||||
TreemapModeSamples = TreemapMode{valuer.NewString("samples")}
|
||||
)
|
||||
|
||||
func (TreemapMode) Enum() []any {
|
||||
return []any{
|
||||
TreemapModeTimeSeries,
|
||||
TreemapModeSamples,
|
||||
}
|
||||
}
|
||||
|
||||
// StatsRequest represents the payload accepted by the metrics stats endpoint.
|
||||
type StatsRequest struct {
|
||||
Filter *qbtypes.Filter `json:"filter,omitempty"`
|
||||
@@ -98,7 +105,7 @@ func (req *StatsRequest) UnmarshalJSON(data []byte) error {
|
||||
type Stat struct {
|
||||
MetricName string `json:"metricName" required:"true"`
|
||||
Description string `json:"description" required:"true"`
|
||||
MetricType metrictypes.Type `json:"type" required:"true" enum:"gauge,sum,histogram,summary,exponentialhistogram"`
|
||||
MetricType metrictypes.Type `json:"type" required:"true"`
|
||||
MetricUnit string `json:"unit" required:"true"`
|
||||
TimeSeries uint64 `json:"timeseries" required:"true"`
|
||||
Samples uint64 `json:"samples" required:"true"`
|
||||
@@ -112,9 +119,9 @@ type StatsResponse struct {
|
||||
|
||||
type MetricMetadata struct {
|
||||
Description string `json:"description" required:"true"`
|
||||
MetricType metrictypes.Type `json:"type" required:"true" enum:"gauge,sum,histogram,summary,exponentialhistogram"`
|
||||
MetricType metrictypes.Type `json:"type" required:"true"`
|
||||
MetricUnit string `json:"unit" required:"true"`
|
||||
Temporality metrictypes.Temporality `json:"temporality" required:"true" enum:"delta,cumulative,unspecified"`
|
||||
Temporality metrictypes.Temporality `json:"temporality" required:"true"`
|
||||
IsMonotonic bool `json:"isMonotonic" required:"true"`
|
||||
}
|
||||
|
||||
@@ -131,10 +138,10 @@ func (m *MetricMetadata) UnmarshalBinary(data []byte) error {
|
||||
// UpdateMetricMetadataRequest represents the payload for updating metric metadata.
|
||||
type UpdateMetricMetadataRequest struct {
|
||||
MetricName string `json:"metricName" required:"true"`
|
||||
Type metrictypes.Type `json:"type" required:"true" enum:"gauge,sum,histogram,summary,exponentialhistogram"`
|
||||
Type metrictypes.Type `json:"type" required:"true"`
|
||||
Description string `json:"description" required:"true"`
|
||||
Unit string `json:"unit" required:"true"`
|
||||
Temporality metrictypes.Temporality `json:"temporality" required:"true" enum:"delta,cumulative,unspecified"`
|
||||
Temporality metrictypes.Temporality `json:"temporality" required:"true"`
|
||||
IsMonotonic bool `json:"isMonotonic" required:"true"`
|
||||
}
|
||||
|
||||
@@ -144,7 +151,7 @@ type TreemapRequest struct {
|
||||
Start int64 `json:"start" required:"true"`
|
||||
End int64 `json:"end" required:"true"`
|
||||
Limit int `json:"limit" required:"true"`
|
||||
Mode TreemapMode `json:"mode" required:"true" enum:"timeseries,samples"`
|
||||
Mode TreemapMode `json:"mode" required:"true"`
|
||||
}
|
||||
|
||||
// Validate enforces basic constraints on TreemapRequest.
|
||||
|
||||
@@ -36,7 +36,7 @@ func (t Temporality) Value() (driver.Value, error) {
|
||||
}
|
||||
}
|
||||
|
||||
func (t *Temporality) Scan(src interface{}) error {
|
||||
func (t *Temporality) Scan(src any) error {
|
||||
if src == nil {
|
||||
*t = Unknown
|
||||
return nil
|
||||
@@ -66,6 +66,14 @@ func (t *Temporality) Scan(src interface{}) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (Temporality) Enum() []any {
|
||||
return []any{
|
||||
Delta,
|
||||
Cumulative,
|
||||
Unspecified,
|
||||
}
|
||||
}
|
||||
|
||||
// Type is the type of the metric in OTLP data model
|
||||
// Read more here https://opentelemetry.io/docs/specs/otel/metrics/data-model/#metric-points
|
||||
type Type struct {
|
||||
@@ -134,6 +142,16 @@ var (
|
||||
UnspecifiedType = Type{valuer.NewString("")}
|
||||
)
|
||||
|
||||
func (Type) Enum() []any {
|
||||
return []any{
|
||||
GaugeType,
|
||||
SumType,
|
||||
HistogramType,
|
||||
SummaryType,
|
||||
ExpHistogramType,
|
||||
}
|
||||
}
|
||||
|
||||
type TimeAggregation struct {
|
||||
valuer.String
|
||||
}
|
||||
@@ -151,6 +169,21 @@ var (
|
||||
TimeAggregationIncrease = TimeAggregation{valuer.NewString("increase")}
|
||||
)
|
||||
|
||||
func (TimeAggregation) Enum() []any {
|
||||
return []any{
|
||||
TimeAggregationUnspecified,
|
||||
TimeAggregationLatest,
|
||||
TimeAggregationSum,
|
||||
TimeAggregationAvg,
|
||||
TimeAggregationMin,
|
||||
TimeAggregationMax,
|
||||
TimeAggregationCount,
|
||||
TimeAggregationCountDistinct,
|
||||
TimeAggregationRate,
|
||||
TimeAggregationIncrease,
|
||||
}
|
||||
}
|
||||
|
||||
type SpaceAggregation struct {
|
||||
valuer.String
|
||||
}
|
||||
@@ -169,6 +202,22 @@ var (
|
||||
SpaceAggregationPercentile99 = SpaceAggregation{valuer.NewString("p99")}
|
||||
)
|
||||
|
||||
func (SpaceAggregation) Enum() []any {
|
||||
return []any{
|
||||
SpaceAggregationUnspecified,
|
||||
SpaceAggregationSum,
|
||||
SpaceAggregationAvg,
|
||||
SpaceAggregationMin,
|
||||
SpaceAggregationMax,
|
||||
SpaceAggregationCount,
|
||||
SpaceAggregationPercentile50,
|
||||
SpaceAggregationPercentile75,
|
||||
SpaceAggregationPercentile90,
|
||||
SpaceAggregationPercentile95,
|
||||
SpaceAggregationPercentile99,
|
||||
}
|
||||
}
|
||||
|
||||
func (s SpaceAggregation) IsPercentile() bool {
|
||||
return s == SpaceAggregationPercentile50 ||
|
||||
s == SpaceAggregationPercentile75 ||
|
||||
|
||||
707	pkg/types/querybuildertypes/querybuildertypesv5/openapi.go	Normal file
@@ -0,0 +1,707 @@
|
||||
package querybuildertypesv5
|
||||
|
||||
import (
|
||||
"github.com/swaggest/jsonschema-go"
|
||||
)
|
||||
|
||||
// Enum returns the acceptable values for QueryType.
|
||||
func (QueryType) Enum() []any {
|
||||
return []any{
|
||||
QueryTypeBuilder,
|
||||
QueryTypeFormula,
|
||||
// Not yet supported.
|
||||
// QueryTypeSubQuery,
|
||||
// QueryTypeJoin,
|
||||
QueryTypeTraceOperator,
|
||||
QueryTypeClickHouseSQL,
|
||||
QueryTypePromQL,
|
||||
}
|
||||
}
|
||||
|
||||
// Enum returns the acceptable values for RequestType.
|
||||
func (RequestType) Enum() []any {
|
||||
return []any{
|
||||
RequestTypeScalar,
|
||||
RequestTypeTimeSeries,
|
||||
RequestTypeRaw,
|
||||
RequestTypeRawStream,
|
||||
RequestTypeTrace,
|
||||
// RequestTypeDistribution,
|
||||
}
|
||||
}
|
||||
|
||||
// Enum returns the acceptable values for FunctionName.
|
||||
func (FunctionName) Enum() []any {
|
||||
return []any{
|
||||
FunctionNameCutOffMin,
|
||||
FunctionNameCutOffMax,
|
||||
FunctionNameClampMin,
|
||||
FunctionNameClampMax,
|
||||
FunctionNameAbsolute,
|
||||
FunctionNameRunningDiff,
|
||||
FunctionNameLog2,
|
||||
FunctionNameLog10,
|
||||
FunctionNameCumulativeSum,
|
||||
FunctionNameEWMA3,
|
||||
FunctionNameEWMA5,
|
||||
FunctionNameEWMA7,
|
||||
FunctionNameMedian3,
|
||||
FunctionNameMedian5,
|
||||
FunctionNameMedian7,
|
||||
FunctionNameTimeShift,
|
||||
FunctionNameAnomaly,
|
||||
FunctionNameFillZero,
|
||||
}
|
||||
}
|
||||
|
||||
// Enum returns the acceptable values for OrderDirection.
|
||||
func (OrderDirection) Enum() []any {
|
||||
return []any{
|
||||
OrderDirectionAsc,
|
||||
OrderDirectionDesc,
|
||||
}
|
||||
}
|
||||
|
||||
// Enum returns the acceptable values for ReduceTo.
|
||||
func (ReduceTo) Enum() []any {
|
||||
return []any{
|
||||
ReduceToSum,
|
||||
ReduceToCount,
|
||||
ReduceToAvg,
|
||||
ReduceToMin,
|
||||
ReduceToMax,
|
||||
ReduceToLast,
|
||||
ReduceToMedian,
|
||||
}
|
||||
}
|
||||
|
||||
// Enum returns the acceptable values for VariableType.
|
||||
func (VariableType) Enum() []any {
|
||||
return []any{
|
||||
QueryVariableType,
|
||||
DynamicVariableType,
|
||||
CustomVariableType,
|
||||
TextBoxVariableType,
|
||||
}
|
||||
}
|
||||
|
||||
// Enum returns the acceptable values for JoinType.
|
||||
func (JoinType) Enum() []any {
|
||||
return []any{
|
||||
JoinTypeInner,
|
||||
JoinTypeLeft,
|
||||
JoinTypeRight,
|
||||
JoinTypeFull,
|
||||
JoinTypeCross,
|
||||
}
|
||||
}
|
||||
|
||||
// Enum returns the acceptable values for ColumnType.
|
||||
func (ColumnType) Enum() []any {
|
||||
return []any{
|
||||
ColumnTypeGroup,
|
||||
ColumnTypeAggregation,
|
||||
}
|
||||
}
|
||||
|
||||
// queryEnvelopeBuilderTrace is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=traces.
|
||||
type queryEnvelopeBuilderTrace struct {
|
||||
Type QueryType `json:"type" description:"The type of the query."`
|
||||
Spec QueryBuilderQuery[TraceAggregation] `json:"spec" description:"The trace builder query specification."`
|
||||
}
|
||||
|
||||
// queryEnvelopeBuilderLog is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=logs.
|
||||
type queryEnvelopeBuilderLog struct {
|
||||
Type QueryType `json:"type" description:"The type of the query."`
|
||||
Spec QueryBuilderQuery[LogAggregation] `json:"spec" description:"The log builder query specification."`
|
||||
}
|
||||
|
||||
// queryEnvelopeBuilderMetric is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=metrics.
|
||||
type queryEnvelopeBuilderMetric struct {
|
||||
Type QueryType `json:"type" description:"The type of the query."`
|
||||
Spec QueryBuilderQuery[MetricAggregation] `json:"spec" description:"The metric builder query specification."`
|
||||
}
|
||||
|
||||
// queryEnvelopeFormula is the OpenAPI schema for a QueryEnvelope with type=builder_formula.
|
||||
type queryEnvelopeFormula struct {
|
||||
Type QueryType `json:"type" description:"The type of the query."`
|
||||
Spec QueryBuilderFormula `json:"spec" description:"The formula specification."`
|
||||
}
|
||||
|
||||
// queryEnvelopeJoin is the OpenAPI schema for a QueryEnvelope with type=builder_join.
|
||||
// type queryEnvelopeJoin struct {
|
||||
// Type QueryType `json:"type" description:"The type of the query."`
|
||||
// Spec QueryBuilderJoin `json:"spec" description:"The join specification."`
|
||||
// }
|
||||
|
||||
// queryEnvelopeTraceOperator is the OpenAPI schema for a QueryEnvelope with type=builder_trace_operator.
|
||||
type queryEnvelopeTraceOperator struct {
|
||||
Type QueryType `json:"type" description:"The type of the query."`
|
||||
Spec QueryBuilderTraceOperator `json:"spec" description:"The trace operator specification."`
|
||||
}
|
||||
|
||||
// queryEnvelopePromQL is the OpenAPI schema for a QueryEnvelope with type=promql.
|
||||
type queryEnvelopePromQL struct {
|
||||
Type QueryType `json:"type" description:"The type of the query."`
|
||||
Spec PromQuery `json:"spec" description:"The PromQL query specification."`
|
||||
}
|
||||
|
||||
// queryEnvelopeClickHouseSQL is the OpenAPI schema for a QueryEnvelope with type=clickhouse_sql.
|
||||
type queryEnvelopeClickHouseSQL struct {
|
||||
Type QueryType `json:"type" description:"The type of the query."`
|
||||
Spec ClickHouseQuery `json:"spec" description:"The ClickHouse SQL query specification."`
|
||||
}
|
||||
|
||||
var _ jsonschema.OneOfExposer = QueryEnvelope{}
|
||||
|
||||
// JSONSchemaOneOf returns the oneOf variants for the QueryEnvelope discriminated union.
|
||||
// Each variant represents a different query type with its corresponding spec schema.
|
||||
func (QueryEnvelope) JSONSchemaOneOf() []any {
|
||||
return []any{
|
||||
queryEnvelopeBuilderTrace{},
|
||||
queryEnvelopeBuilderLog{},
|
||||
queryEnvelopeBuilderMetric{},
|
||||
queryEnvelopeFormula{},
|
||||
// queryEnvelopeJoin{},
|
||||
queryEnvelopeTraceOperator{},
|
||||
queryEnvelopePromQL{},
|
||||
queryEnvelopeClickHouseSQL{},
|
||||
}
|
||||
}
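A hedged illustration (not part of the diff) of what one such envelope looks like on the wire; the field names follow the PromQL request example further down in this file, and the variable name exampleEnvelopeJSON is hypothetical.

// Illustrative only: a minimal QueryEnvelope payload. "type" selects one of the oneOf
// variants above and "spec" carries that variant's fields (here, the PromQL spec).
var exampleEnvelopeJSON = `{
	"type": "promql",
	"spec": {
		"name": "request_rate",
		"query": "sum(rate({\"http.server.duration.count\"}[5m])) by (\"service.name\")",
		"step": 60
	}
}`
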
|
||||
|
||||
var _ jsonschema.Exposer = Step{}
|
||||
|
||||
// JSONSchema returns a custom schema for Step that accepts either a duration string or a number (seconds).
|
||||
func (Step) JSONSchema() (jsonschema.Schema, error) {
|
||||
s := jsonschema.Schema{}
|
||||
s.WithDescription("Step interval. Accepts a Go duration string (e.g., \"60s\", \"1m\", \"1h\") or a number representing seconds (e.g., 60).")
|
||||
|
||||
strSchema := jsonschema.Schema{}
|
||||
strSchema.WithType(jsonschema.String.Type())
|
||||
strSchema.WithExamples("60s", "5m", "1h")
|
||||
strSchema.WithDescription("Duration string (e.g., \"60s\", \"5m\", \"1h\").")
|
||||
|
||||
numSchema := jsonschema.Schema{}
|
||||
numSchema.WithType(jsonschema.Number.Type())
|
||||
numSchema.WithExamples(60, 300, 3600)
|
||||
numSchema.WithDescription("Duration in seconds.")
|
||||
|
||||
s.OneOf = []jsonschema.SchemaOrBool{
|
||||
strSchema.ToSchemaOrBool(),
|
||||
numSchema.ToSchemaOrBool(),
|
||||
}
|
||||
return s, nil
|
||||
}
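For reference, a small sketch (not part of the diff) of the two payload shapes the Step schema above accepts; both forms also appear in the request examples later in this file, and the variable name stepIntervalExamples is hypothetical.

// Illustrative only: both fragments satisfy the oneOf schema returned by Step.JSONSchema,
// either a Go-style duration string or a plain number interpreted as seconds.
var stepIntervalExamples = []string{
	`{"stepInterval": "60s"}`, // duration-string form
	`{"stepInterval": 120}`,   // seconds-as-number form
}
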
|
||||
|
||||
var _ jsonschema.OneOfExposer = QueryData{}
|
||||
|
||||
// JSONSchemaOneOf documents the polymorphic result types in QueryData.Results.
|
||||
func (QueryData) JSONSchemaOneOf() []any {
|
||||
return []any{
|
||||
TimeSeriesData{},
|
||||
ScalarData{},
|
||||
RawData{},
|
||||
}
|
||||
}
|
||||
|
||||
var _ jsonschema.Preparer = &QueryRangeRequest{}
|
||||
|
||||
// PrepareJSONSchema adds examples and description to the QueryRangeRequest schema.
|
||||
func (q *QueryRangeRequest) PrepareJSONSchema(schema *jsonschema.Schema) error {
|
||||
schema.WithDescription("Request body for the v5 query range endpoint. Supports builder queries (traces, logs, metrics), formulas, joins, trace operators, PromQL, and ClickHouse SQL queries.")
|
||||
schema.WithExamples(
|
||||
// 1. time_series + traces builder: count spans grouped by service, ordered by count
|
||||
map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "time_series",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "traces",
|
||||
"aggregations": []any{
|
||||
map[string]any{
|
||||
"expression": "count()",
|
||||
"alias": "span_count",
|
||||
},
|
||||
},
|
||||
"stepInterval": "60s",
|
||||
"filter": map[string]any{
|
||||
"expression": "service.name = 'frontend'",
|
||||
},
|
||||
"groupBy": []any{
|
||||
map[string]any{
|
||||
"name": "service.name",
|
||||
"fieldContext": "resource",
|
||||
},
|
||||
},
|
||||
"order": []any{
|
||||
map[string]any{
|
||||
"key": map[string]any{"name": "span_count"},
|
||||
"direction": "desc",
|
||||
},
|
||||
},
|
||||
"limit": 10,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
// 2. time_series + logs builder: count logs grouped by service
|
||||
map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "time_series",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "logs",
|
||||
"aggregations": []any{
|
||||
map[string]any{
|
||||
"expression": "count()",
|
||||
"alias": "log_count",
|
||||
},
|
||||
},
|
||||
"stepInterval": "60s",
|
||||
"filter": map[string]any{
|
||||
"expression": "severity_text = 'ERROR'",
|
||||
},
|
||||
"groupBy": []any{
|
||||
map[string]any{
|
||||
"name": "service.name",
|
||||
"fieldContext": "resource",
|
||||
},
|
||||
},
|
||||
"order": []any{
|
||||
map[string]any{
|
||||
"key": map[string]any{"name": "log_count"},
|
||||
"direction": "desc",
|
||||
},
|
||||
},
|
||||
"limit": 10,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
// 3. time_series + metrics builder (Gauge): latest value averaged across series
|
||||
map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "time_series",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "metrics",
|
||||
"aggregations": []any{
|
||||
map[string]any{
|
||||
"metricName": "system.cpu.utilization",
|
||||
"timeAggregation": "latest",
|
||||
"spaceAggregation": "avg",
|
||||
},
|
||||
},
|
||||
"stepInterval": "60s",
|
||||
"groupBy": []any{
|
||||
map[string]any{
|
||||
"name": "host.name",
|
||||
"fieldContext": "resource",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
// 4. time_series + metrics builder (Sum): rate of cumulative counter
|
||||
map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "time_series",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "metrics",
|
||||
"aggregations": []any{
|
||||
map[string]any{
|
||||
"metricName": "http.server.duration.count",
|
||||
"timeAggregation": "rate",
|
||||
"spaceAggregation": "sum",
|
||||
},
|
||||
},
|
||||
"stepInterval": 120,
|
||||
"groupBy": []any{
|
||||
map[string]any{
|
||||
"name": "service.name",
|
||||
"fieldContext": "resource",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
// 5. time_series + metrics builder (Histogram): p99 latency
|
||||
map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "time_series",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "metrics",
|
||||
"aggregations": []any{
|
||||
map[string]any{
|
||||
"metricName": "http.server.duration.bucket",
|
||||
"spaceAggregation": "p99",
|
||||
},
|
||||
},
|
||||
"stepInterval": "60s",
|
||||
"groupBy": []any{
|
||||
map[string]any{
|
||||
"name": "service.name",
|
||||
"fieldContext": "resource",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
// 6. raw + logs builder: fetch raw log records
|
||||
map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "raw",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "logs",
|
||||
"filter": map[string]any{
|
||||
"expression": "severity_text = 'ERROR'",
|
||||
},
|
||||
"selectFields": []any{
|
||||
map[string]any{
|
||||
"name": "body",
|
||||
"fieldContext": "log",
|
||||
},
|
||||
map[string]any{
|
||||
"name": "service.name",
|
||||
"fieldContext": "resource",
|
||||
},
|
||||
},
|
||||
"order": []any{
|
||||
map[string]any{
|
||||
"key": map[string]any{"name": "timestamp", "fieldContext": "log"},
|
||||
"direction": "desc",
|
||||
},
|
||||
map[string]any{
|
||||
"key": map[string]any{"name": "id"},
|
||||
"direction": "desc",
|
||||
},
|
||||
},
|
||||
"limit": 50,
|
||||
"offset": 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
// 7. raw + traces builder: fetch raw span records
|
||||
map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "raw",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "traces",
|
||||
"filter": map[string]any{
|
||||
"expression": "service.name = 'frontend' AND has_error = true",
|
||||
},
|
||||
"selectFields": []any{
|
||||
map[string]any{
|
||||
"name": "name",
|
||||
"fieldContext": "span",
|
||||
},
|
||||
map[string]any{
|
||||
"name": "duration_nano",
|
||||
"fieldContext": "span",
|
||||
},
|
||||
},
|
||||
"order": []any{
|
||||
map[string]any{
|
||||
"key": map[string]any{"name": "timestamp", "fieldContext": "span"},
|
||||
"direction": "desc",
|
||||
},
|
||||
},
|
||||
"limit": 100,
|
						},
					},
				},
			},
		},
		// 8. scalar + traces builder: total span count as a single value
		map[string]any{
			"schemaVersion": "v1",
			"start": 1640995200000,
			"end": 1640998800000,
			"requestType": "scalar",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name": "A",
							"signal": "traces",
							"aggregations": []any{
								map[string]any{
									"expression": "count()",
									"alias": "span_count",
								},
							},
							"filter": map[string]any{
								"expression": "service.name = 'frontend'",
							},
						},
					},
				},
			},
		},
		// 9. scalar + logs builder: total error log count as a single value
		map[string]any{
			"schemaVersion": "v1",
			"start": 1640995200000,
			"end": 1640998800000,
			"requestType": "scalar",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name": "A",
							"signal": "logs",
							"aggregations": []any{
								map[string]any{
									"expression": "count()",
									"alias": "error_count",
								},
							},
							"filter": map[string]any{
								"expression": "severity_text = 'ERROR'",
							},
						},
					},
				},
			},
		},
		// 10. scalar + metrics builder: single reduced value with reduceTo
		map[string]any{
			"schemaVersion": "v1",
			"start": 1640995200000,
			"end": 1640998800000,
			"requestType": "scalar",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name": "A",
							"signal": "metrics",
							"aggregations": []any{
								map[string]any{
									"metricName": "http.server.duration.count",
									"timeAggregation": "rate",
									"spaceAggregation": "sum",
									"reduceTo": "sum",
								},
							},
							"stepInterval": "60s",
						},
					},
				},
			},
		},
		// 11. builder formula: error rate from two trace queries
		map[string]any{
			"schemaVersion": "v1",
			"start": 1640995200000,
			"end": 1640998800000,
			"requestType": "time_series",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name": "A",
							"signal": "traces",
							"aggregations": []any{
								map[string]any{
									"expression": "countIf(has_error = true)",
								},
							},
							"stepInterval": "60s",
							"groupBy": []any{
								map[string]any{
									"name": "service.name",
									"fieldContext": "resource",
								},
							},
						},
					},
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name": "B",
							"signal": "traces",
							"aggregations": []any{
								map[string]any{
									"expression": "count()",
								},
							},
							"stepInterval": "60s",
							"groupBy": []any{
								map[string]any{
									"name": "service.name",
									"fieldContext": "resource",
								},
							},
						},
					},
					map[string]any{
						"type": "builder_formula",
						"spec": map[string]any{
							"name": "error_rate",
							"expression": "A / B * 100",
						},
					},
				},
			},
		},
		// 12. PromQL query with UTF-8 dot metric name
		map[string]any{
			"schemaVersion": "v1",
			"start": 1640995200000,
			"end": 1640998800000,
			"requestType": "time_series",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "promql",
						"spec": map[string]any{
							"name": "request_rate",
							"query": "sum(rate({\"http.server.duration.count\"}[5m])) by (\"service.name\")",
							"step": 60,
						},
					},
				},
			},
		},
		// 13. ClickHouse SQL — time_series
		map[string]any{
			"schemaVersion": "v1",
			"start": 1640995200000,
			"end": 1640998800000,
			"requestType": "time_series",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "clickhouse_sql",
						"spec": map[string]any{
							"name": "span_rate",
							"query": "SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, count() AS value FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= $start_datetime AND timestamp <= $end_datetime GROUP BY ts ORDER BY ts",
						},
					},
				},
			},
		},
		// 14. ClickHouse SQL — raw
		map[string]any{
			"schemaVersion": "v1",
			"start": 1640995200000,
			"end": 1640998800000,
			"requestType": "raw",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "clickhouse_sql",
						"spec": map[string]any{
							"name": "recent_errors",
							"query": "SELECT timestamp, body FROM signoz_logs.distributed_logs_v2 WHERE timestamp >= $start_timestamp_nano AND timestamp <= $end_timestamp_nano AND severity_text = 'ERROR' ORDER BY timestamp DESC LIMIT 100",
						},
					},
				},
			},
		},
		// 15. ClickHouse SQL — scalar
		map[string]any{
			"schemaVersion": "v1",
			"start": 1640995200000,
			"end": 1640998800000,
			"requestType": "scalar",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "clickhouse_sql",
						"spec": map[string]any{
							"name": "total_spans",
							"query": "SELECT count() AS value FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= $start_datetime AND timestamp <= $end_datetime",
						},
					},
				},
			},
		},
	)

	return nil
}

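For orientation, here is a minimal client-side sketch that marshals example 15 above (total span count as a scalar ClickHouse SQL query) and POSTs it. The endpoint URL and the SIGNOZ-API-KEY header are assumptions about a local deployment, not something established by this diff; adjust both to your setup.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Example 15 from the schema examples above: total span count as a scalar.
	payload := map[string]any{
		"schemaVersion": "v1",
		"start": 1640995200000,
		"end": 1640998800000,
		"requestType": "scalar",
		"compositeQuery": map[string]any{
			"queries": []any{
				map[string]any{
					"type": "clickhouse_sql",
					"spec": map[string]any{
						"name": "total_spans",
						"query": "SELECT count() AS value FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= $start_datetime AND timestamp <= $end_datetime",
					},
				},
			},
		},
	}

	body, err := json.Marshal(payload)
	if err != nil {
		panic(err)
	}

	// Both the URL and the auth header are assumptions; adjust to your deployment.
	req, err := http.NewRequest(http.MethodPost, "http://localhost:8080/api/v5/query_range", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("SIGNOZ-API-KEY", "<your-api-key>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	fmt.Println("status:", resp.Status)
}
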
var _ jsonschema.Preparer = &QueryRangeResponse{}

// PrepareJSONSchema adds description to the QueryRangeResponse schema.
func (q *QueryRangeResponse) PrepareJSONSchema(schema *jsonschema.Schema) error {
	schema.WithDescription("Response from the v5 query range endpoint. The data.results array contains typed results depending on the requestType: TimeSeriesData for time_series, ScalarData for scalar, or RawData for raw requests.")
	return nil
}

var _ jsonschema.Preparer = &CompositeQuery{}

// PrepareJSONSchema adds description to the CompositeQuery schema.
func (c *CompositeQuery) PrepareJSONSchema(schema *jsonschema.Schema) error {
	schema.WithDescription("Composite query containing one or more query envelopes. Each query envelope specifies its type and corresponding spec.")
	return nil
}

var _ jsonschema.Preparer = &ExecStats{}

// PrepareJSONSchema adds description to the ExecStats schema.
func (e *ExecStats) PrepareJSONSchema(schema *jsonschema.Schema) error {
	schema.WithDescription("Execution statistics for the query, including rows scanned, bytes scanned, and duration.")
	return nil
}

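These Preparer hooks only take effect when the types are reflected into a schema. Below is a self-contained sketch of that mechanism using the swaggest/jsonschema-go reflector; the Report type and its description are illustrative stand-ins, not the real response types.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/swaggest/jsonschema-go"
)

// Report stands in for a response type such as QueryRangeResponse.
type Report struct {
	Rows int `json:"rows"`
}

var _ jsonschema.Preparer = &Report{}

// PrepareJSONSchema is invoked by the reflector after the schema is built,
// letting the type attach a description (or examples) to its own schema.
func (r *Report) PrepareJSONSchema(schema *jsonschema.Schema) error {
	schema.WithDescription("Stand-in response type with a single rows field.")
	return nil
}

func main() {
	reflector := jsonschema.Reflector{}

	schema, err := reflector.Reflect(&Report{})
	if err != nil {
		panic(err)
	}

	out, _ := json.MarshalIndent(&schema, "", "  ")
	fmt.Println(string(out)) // the emitted schema carries the description set above
}
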
48
pkg/types/telemetrytypes/openapi.go
Normal file
@@ -0,0 +1,48 @@
package telemetrytypes

// Enum returns the acceptable values for Signal.
func (Signal) Enum() []any {
	return []any{
		SignalTraces,
		SignalLogs,
		SignalMetrics,
	}
}

// Enum returns the acceptable values for FieldContext.
func (FieldContext) Enum() []any {
	return []any{
		FieldContextMetric,
		FieldContextLog,
		FieldContextSpan,
		// FieldContextTrace,
		FieldContextResource,
		// FieldContextScope,
		FieldContextAttribute,
		// FieldContextEvent,
		FieldContextBody,
	}
}

// Enum returns the acceptable values for Source.
func (Source) Enum() []any {
	return []any{
		SourceMeter,
	}
}

// Enum returns the acceptable values for FieldDataType.
func (FieldDataType) Enum() []any {
	return []any{
		FieldDataTypeString,
		FieldDataTypeBool,
		FieldDataTypeFloat64,
		FieldDataTypeInt64,
		FieldDataTypeNumber,
		// FieldDataTypeArrayString,
		// FieldDataTypeArrayFloat64,
		// FieldDataTypeArrayBool,
		// FieldDataTypeArrayInt64,
		// FieldDataTypeArrayNumber,
	}
}
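These Enum() hooks matter only at schema-generation time: when a reflector such as swaggest/jsonschema-go sees a type implementing its Enum interface, the returned values become the JSON Schema enum for that field. A minimal sketch with hypothetical stand-in types (the names below are illustrative, not the real telemetrytypes definitions):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/swaggest/jsonschema-go"
)

// Signal mimics a string-backed enum like the ones above.
type Signal string

const (
	SignalTraces  Signal = "traces"
	SignalLogs    Signal = "logs"
	SignalMetrics Signal = "metrics"
)

// Enum lists the acceptable values; the reflector turns this into an enum constraint.
func (Signal) Enum() []any {
	return []any{SignalTraces, SignalLogs, SignalMetrics}
}

// Query is a small container so the enum shows up as a property constraint.
type Query struct {
	Signal Signal `json:"signal"`
}

func main() {
	reflector := jsonschema.Reflector{}

	schema, err := reflector.Reflect(Query{})
	if err != nil {
		panic(err)
	}

	out, _ := json.MarshalIndent(&schema, "", "  ")
	fmt.Println(string(out)) // the generated schema restricts signal to traces, logs, metrics
}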