Mirror of https://github.com/SigNoz/signoz.git (synced 2026-02-09 03:02:20 +00:00)

Compare commits: fix/extern ... migrate-fi (12 commits)

| SHA1 |
|---|
| 324d344470 |
| b420ca494e |
| 068dcc9ec3 |
| af6615ce49 |
| f7b80c030c |
| c9fb4e8590 |
| c9bb2f2125 |
| e4693ce64c |
| ca9b3a910a |
| 9dc7d2389a |
| 6c4f1b8741 |
| 043edb2b6d |
@@ -607,6 +607,178 @@ paths:
      summary: Update auth domain
      tags:
        - authdomains
  /api/v1/fields/keys:
    get:
      deprecated: false
      description: This endpoint returns field keys
      operationId: GetFieldsKeys
      parameters:
        - in: query
          name: signal
          schema:
            type: string
        - in: query
          name: source
          schema:
            type: string
        - in: query
          name: limit
          schema:
            type: integer
        - in: query
          name: startUnixMilli
          schema:
            format: int64
            type: integer
        - in: query
          name: endUnixMilli
          schema:
            format: int64
            type: integer
        - in: query
          name: fieldContext
          schema:
            type: string
        - in: query
          name: fieldDataType
          schema:
            type: string
        - in: query
          name: metricName
          schema:
            type: string
        - in: query
          name: searchText
          schema:
            type: string
      responses:
        "200":
          content:
            application/json:
              schema:
                properties:
                  data:
                    $ref: '#/components/schemas/TelemetrytypesGettableFieldKeys'
                  status:
                    type: string
                type: object
          description: OK
        "401":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Unauthorized
        "403":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Forbidden
        "500":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Internal Server Error
      security:
        - api_key:
            - VIEWER
        - tokenizer:
            - VIEWER
      summary: Get field keys
      tags:
        - fields
  /api/v1/fields/values:
    get:
      deprecated: false
      description: This endpoint returns field values
      operationId: GetFieldsValues
      parameters:
        - in: query
          name: signal
          schema:
            type: string
        - in: query
          name: source
          schema:
            type: string
        - in: query
          name: limit
          schema:
            type: integer
        - in: query
          name: startUnixMilli
          schema:
            format: int64
            type: integer
        - in: query
          name: endUnixMilli
          schema:
            format: int64
            type: integer
        - in: query
          name: fieldContext
          schema:
            type: string
        - in: query
          name: fieldDataType
          schema:
            type: string
        - in: query
          name: metricName
          schema:
            type: string
        - in: query
          name: searchText
          schema:
            type: string
        - in: query
          name: name
          schema:
            type: string
        - in: query
          name: existingQuery
          schema:
            type: string
      responses:
        "200":
          content:
            application/json:
              schema:
                properties:
                  data:
                    $ref: '#/components/schemas/TelemetrytypesGettableFieldValues'
                  status:
                    type: string
                type: object
          description: OK
        "401":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Unauthorized
        "403":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Forbidden
        "500":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Internal Server Error
      security:
        - api_key:
            - VIEWER
        - tokenizer:
            - VIEWER
      summary: Get field values
      tags:
        - fields
  /api/v1/getResetPasswordToken/{id}:
    get:
      deprecated: false
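The new field-metadata endpoints are plain GET requests driven by query-string filters. Below is a minimal TypeScript sketch of a client call; the path, query parameters, and response shape follow the spec above, while the base URL, API-key header name, and example parameter values are assumptions, not part of the spec.

```typescript
// Sketch only: query GET /api/v1/fields/keys for field-key suggestions.
const BASE_URL = 'http://localhost:8080'; // placeholder
const API_KEY = '<api-key>'; // placeholder

interface TelemetryFieldKey {
	name: string;
	signal: string;
	fieldContext: string;
	fieldDataType: string;
	unit: string;
	description: string;
}

interface GettableFieldKeys {
	complete: boolean;
	keys: Record<string, TelemetryFieldKey[]> | null;
}

async function getFieldKeys(searchText: string): Promise<GettableFieldKeys> {
	const params = new URLSearchParams({
		signal: 'traces', // example value; the spec only types this as string
		searchText,
		limit: '50',
	});
	const res = await fetch(`${BASE_URL}/api/v1/fields/keys?${params.toString()}`, {
		headers: { 'SIGNOZ-API-KEY': API_KEY }, // header name is an assumption
	});
	if (!res.ok) {
		throw new Error(`fields/keys request failed: ${res.status}`);
	}
	// The 200 response wraps the payload in { status, data } per the spec above.
	const body = (await res.json()) as { status: string; data: GettableFieldKeys };
	return body.data;
}
```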
@@ -2226,6 +2398,12 @@ paths:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Bad Request
+        "422":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Unprocessable Entity
        "500":
          content:
            application/json:
@@ -3922,19 +4100,9 @@ components:
        isMonotonic:
          type: boolean
        temporality:
-          enum:
-            - delta
-            - cumulative
-            - unspecified
-          type: string
+          $ref: '#/components/schemas/MetrictypesTemporality'
        type:
-          enum:
-            - gauge
-            - sum
-            - histogram
-            - summary
-            - exponentialhistogram
-          type: string
+          $ref: '#/components/schemas/MetrictypesType'
        unit:
          type: string
      required:
@@ -3957,13 +4125,7 @@ components:
          minimum: 0
          type: integer
        type:
-          enum:
-            - gauge
-            - sum
-            - histogram
-            - summary
-            - exponentialhistogram
-          type: string
+          $ref: '#/components/schemas/MetrictypesType'
        unit:
          type: string
      required:
@@ -4024,6 +4186,11 @@ components:
        - percentage
        - totalValue
      type: object
+    MetricsexplorertypesTreemapMode:
+      enum:
+        - timeseries
+        - samples
+      type: string
    MetricsexplorertypesTreemapRequest:
      properties:
        end:
@@ -4034,10 +4201,7 @@ components:
        limit:
          type: integer
        mode:
-          enum:
-            - timeseries
-            - samples
-          type: string
+          $ref: '#/components/schemas/MetricsexplorertypesTreemapMode'
        start:
          format: int64
          type: integer
@@ -4072,19 +4236,9 @@ components:
        metricName:
          type: string
        temporality:
-          enum:
-            - delta
-            - cumulative
-            - unspecified
-          type: string
+          $ref: '#/components/schemas/MetrictypesTemporality'
        type:
-          enum:
-            - gauge
-            - sum
-            - histogram
-            - summary
-            - exponentialhistogram
-          type: string
+          $ref: '#/components/schemas/MetrictypesType'
        unit:
          type: string
      required:
@@ -4095,6 +4249,20 @@ components:
        - temporality
        - isMonotonic
      type: object
+    MetrictypesTemporality:
+      enum:
+        - delta
+        - cumulative
+        - unspecified
+      type: string
+    MetrictypesType:
+      enum:
+        - gauge
+        - sum
+        - histogram
+        - summary
+        - exponentialhistogram
+      type: string
    PreferencetypesPreference:
      properties:
        allowedScopes:
@@ -4248,6 +4416,60 @@ components:
          format: date-time
          type: string
      type: object
    TelemetrytypesGettableFieldKeys:
      properties:
        complete:
          type: boolean
        keys:
          additionalProperties:
            items:
              $ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
            type: array
          nullable: true
          type: object
      type: object
    TelemetrytypesGettableFieldValues:
      properties:
        complete:
          type: boolean
        values:
          $ref: '#/components/schemas/TelemetrytypesTelemetryFieldValues'
      type: object
    TelemetrytypesTelemetryFieldKey:
      properties:
        description:
          type: string
        fieldContext:
          type: string
        fieldDataType:
          type: string
        name:
          type: string
        signal:
          type: string
        unit:
          type: string
      type: object
    TelemetrytypesTelemetryFieldValues:
      properties:
        boolValues:
          items:
            type: boolean
          type: array
        numberValues:
          items:
            format: double
            type: number
          type: array
        relatedValues:
          items:
            type: string
          type: array
        stringValues:
          items:
            type: string
          type: array
      type: object
    TypesChangePasswordRequest:
      properties:
        newPassword:
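The Telemetrytypes* schemas above describe the payloads returned by the two field endpoints. A small TypeScript sketch of the value-response shape and a helper that flattens it for an autocomplete list; treating every array as optional is an assumption, since the schemas do not mark required properties.

```typescript
// Shapes mirroring TelemetrytypesTelemetryFieldValues / GettableFieldValues above.
interface TelemetryFieldValues {
	stringValues?: string[];
	boolValues?: boolean[];
	numberValues?: number[];
	relatedValues?: string[];
}

interface GettableFieldValues {
	complete: boolean;
	values: TelemetryFieldValues;
}

// Example: collapse a /api/v1/fields/values payload into a flat suggestion list.
function flattenFieldValues(data: GettableFieldValues): string[] {
	const { stringValues = [], numberValues = [], boolValues = [] } = data.values;
	return [
		...stringValues,
		...numberValues.map(String),
		...boolValues.map(String),
	];
}
```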
@@ -4376,6 +4598,9 @@ components:
          type: string
        orgId:
          type: string
+      required:
+        - orgId
+        - email
      type: object
    TypesPostableInvite:
      properties:
@@ -9,7 +9,6 @@ import (
	"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
	"github.com/SigNoz/signoz/ee/query-service/usage"
	"github.com/SigNoz/signoz/pkg/alertmanager"
-	"github.com/SigNoz/signoz/pkg/apis/fields"
	"github.com/SigNoz/signoz/pkg/global"
	"github.com/SigNoz/signoz/pkg/http/middleware"
	querierAPI "github.com/SigNoz/signoz/pkg/querier"

@@ -56,7 +55,6 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
		FluxInterval: opts.FluxInterval,
		AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
		LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing),
-		FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
		Signoz: signoz,
		QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
		QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
@@ -237,7 +237,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
	apiHandler.RegisterLogsRoutes(r, am)
	apiHandler.RegisterIntegrationRoutes(r, am)
	apiHandler.RegisterCloudIntegrationsRoutes(r, am)
-	apiHandler.RegisterFieldsRoutes(r, am)
	apiHandler.RegisterQueryRangeV3Routes(r, am)
	apiHandler.RegisterInfraMetricsRoutes(r, am)
	apiHandler.RegisterQueryRangeV4Routes(r, am)
@@ -762,18 +762,6 @@ export interface MetricsexplorertypesMetricHighlightsResponseDTO {
	totalTimeSeries: number;
}

-export enum MetricsexplorertypesMetricMetadataDTOTemporality {
-	delta = 'delta',
-	cumulative = 'cumulative',
-	unspecified = 'unspecified',
-}
-export enum MetricsexplorertypesMetricMetadataDTOType {
-	gauge = 'gauge',
-	sum = 'sum',
-	histogram = 'histogram',
-	summary = 'summary',
-	exponentialhistogram = 'exponentialhistogram',
-}
export interface MetricsexplorertypesMetricMetadataDTO {
	/**
	 * @type string

@@ -783,29 +771,14 @@ export interface MetricsexplorertypesMetricMetadataDTO {
	 * @type boolean
	 */
	isMonotonic: boolean;
-	/**
-	 * @enum delta,cumulative,unspecified
-	 * @type string
-	 */
-	temporality: MetricsexplorertypesMetricMetadataDTOTemporality;
-	/**
-	 * @enum gauge,sum,histogram,summary,exponentialhistogram
-	 * @type string
-	 */
-	type: MetricsexplorertypesMetricMetadataDTOType;
+	temporality: MetrictypesTemporalityDTO;
+	type: MetrictypesTypeDTO;
	/**
	 * @type string
	 */
	unit: string;
}

-export enum MetricsexplorertypesStatDTOType {
-	gauge = 'gauge',
-	sum = 'sum',
-	histogram = 'histogram',
-	summary = 'summary',
-	exponentialhistogram = 'exponentialhistogram',
-}
export interface MetricsexplorertypesStatDTO {
	/**
	 * @type string

@@ -825,11 +798,7 @@ export interface MetricsexplorertypesStatDTO {
	 * @minimum 0
	 */
	timeseries: number;
-	/**
-	 * @enum gauge,sum,histogram,summary,exponentialhistogram
-	 * @type string
-	 */
-	type: MetricsexplorertypesStatDTOType;
+	type: MetrictypesTypeDTO;
	/**
	 * @type string
	 */

@@ -889,7 +858,7 @@ export interface MetricsexplorertypesTreemapEntryDTO {
	totalValue: number;
}

-export enum MetricsexplorertypesTreemapRequestDTOMode {
+export enum MetricsexplorertypesTreemapModeDTO {
	timeseries = 'timeseries',
	samples = 'samples',
}

@@ -904,11 +873,7 @@ export interface MetricsexplorertypesTreemapRequestDTO {
	 * @type integer
	 */
	limit: number;
-	/**
-	 * @enum timeseries,samples
-	 * @type string
-	 */
-	mode: MetricsexplorertypesTreemapRequestDTOMode;
+	mode: MetricsexplorertypesTreemapModeDTO;
	/**
	 * @type integer
	 * @format int64

@@ -929,18 +894,6 @@ export interface MetricsexplorertypesTreemapResponseDTO {
	timeseries: MetricsexplorertypesTreemapEntryDTO[] | null;
}

-export enum MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality {
-	delta = 'delta',
-	cumulative = 'cumulative',
-	unspecified = 'unspecified',
-}
-export enum MetricsexplorertypesUpdateMetricMetadataRequestDTOType {
-	gauge = 'gauge',
-	sum = 'sum',
-	histogram = 'histogram',
-	summary = 'summary',
-	exponentialhistogram = 'exponentialhistogram',
-}
export interface MetricsexplorertypesUpdateMetricMetadataRequestDTO {
	/**
	 * @type string

@@ -954,22 +907,26 @@ export interface MetricsexplorertypesUpdateMetricMetadataRequestDTO {
	 * @type string
	 */
	metricName: string;
-	/**
-	 * @enum delta,cumulative,unspecified
-	 * @type string
-	 */
-	temporality: MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality;
-	/**
-	 * @enum gauge,sum,histogram,summary,exponentialhistogram
-	 * @type string
-	 */
-	type: MetricsexplorertypesUpdateMetricMetadataRequestDTOType;
+	temporality: MetrictypesTemporalityDTO;
+	type: MetrictypesTypeDTO;
	/**
	 * @type string
	 */
	unit: string;
}

+export enum MetrictypesTemporalityDTO {
+	delta = 'delta',
+	cumulative = 'cumulative',
+	unspecified = 'unspecified',
+}
+export enum MetrictypesTypeDTO {
+	gauge = 'gauge',
+	sum = 'sum',
+	histogram = 'histogram',
+	summary = 'summary',
+	exponentialhistogram = 'exponentialhistogram',
+}
export interface PreferencetypesPreferenceDTO {
	/**
	 * @type array
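The generated client now references the shared MetrictypesTemporalityDTO and MetrictypesTypeDTO enums instead of the removed per-DTO copies. A short illustrative sketch; the import path for the generated types module is not shown in this diff, and the payload is deliberately partial.

```typescript
// Illustrative only; these names come from the generated types shown above.
const temporality = MetrictypesTemporalityDTO.cumulative;
const metricType = MetrictypesTypeDTO.histogram;

// Used, for example, when building an update-metadata request body:
const partialPayload: Partial<MetricsexplorertypesUpdateMetricMetadataRequestDTO> = {
	temporality,
	type: metricType,
};
```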
@@ -1388,7 +1345,7 @@ export interface TypesPostableForgotPasswordDTO {
	/**
	 * @type string
	 */
-	email?: string;
+	email: string;
	/**
	 * @type string
	 */

@@ -1396,7 +1353,7 @@ export interface TypesPostableForgotPasswordDTO {
	/**
	 * @type string
	 */
-	orgId?: string;
+	orgId: string;
}

export interface TypesPostableInviteDTO {
@@ -0,0 +1,53 @@
import { memo, useMemo } from 'react';
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
import sortValues from 'lib/dashbaordVariables/sortVariableValues';

import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import { VariableItemProps } from './VariableItem';

type CustomVariableInputProps = Pick<
	VariableItemProps,
	'variableData' | 'onValueUpdate'
>;

function CustomVariableInput({
	variableData,
	onValueUpdate,
}: CustomVariableInputProps): JSX.Element {
	const optionsData: (string | number | boolean)[] = useMemo(() => {
		return sortValues(
			commaValuesParser(variableData.customValue || ''),
			variableData.sort,
		) as (string | number | boolean)[];
	}, [variableData.customValue, variableData.sort]);

	const {
		value,
		defaultValue,
		enableSelectAll,
		onChange,
		onDropdownVisibleChange,
		handleClear,
	} = useDashboardVariableSelectHelper({
		variableData,
		optionsData,
		onValueUpdate,
	});

	return (
		<SelectVariableInput
			variableId={variableData.id}
			options={optionsData}
			value={value}
			onChange={onChange}
			onDropdownVisibleChange={onDropdownVisibleChange}
			onClear={handleClear}
			enableSelectAll={enableSelectAll}
			defaultValue={defaultValue}
			isMultiSelect={variableData.multiSelect}
		/>
	);
}

export default memo(CustomVariableInput);
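CustomVariableInput parses the variable's comma-separated customValue into options and hands selection state to the shared select helper. A hypothetical render sketch follows; the variable object is made up and only carries the fields the component reads, hence the cast.

```tsx
// Hypothetical usage sketch for the new CUSTOM-variable component above.
import CustomVariableInput from './CustomVariableInput';

function Example(): JSX.Element {
	const variableData = {
		id: 'env-var',
		name: 'environment',
		type: 'CUSTOM',
		customValue: 'staging,production',
		multiSelect: true,
		showALLOption: true,
	} as never; // cast only because this sketch omits the remaining IDashboardVariable fields

	return (
		<CustomVariableInput
			variableData={variableData}
			onValueUpdate={(name, id, value, allSelected): void => {
				// In the dashboard this callback persists the selection.
				console.log({ name, id, value, allSelected });
			}}
		/>
	);
}
```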
@@ -138,6 +138,7 @@ function DashboardVariableSelection(): JSX.Element | null {
			}
		},
		[
			// This can be removed
			dashboardVariables,
			updateLocalStorageDashboardVariables,
			dependencyData,
@@ -23,9 +23,9 @@ import { SelectItemStyle } from './styles';
import {
	areArraysEqual,
	getOptionsForDynamicVariable,
+	getSelectValue,
	uniqueValues,
} from './util';
-import { getSelectValue } from './VariableItem';

import './DashboardVariableSelection.styles.scss';
@@ -0,0 +1,229 @@
|
||||
import { memo, useCallback, useState } from 'react';
|
||||
import { useQuery } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
|
||||
import { isArray, isString } from 'lodash-es';
|
||||
import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
import { VariableResponseProps } from 'types/api/dashboard/variables/query';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
import { variablePropsToPayloadVariables } from '../utils';
|
||||
import SelectVariableInput from './SelectVariableInput';
|
||||
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
|
||||
import { areArraysEqual, checkAPIInvocation } from './util';
|
||||
|
||||
interface QueryVariableInputProps {
|
||||
variableData: IDashboardVariable;
|
||||
existingVariables: Record<string, IDashboardVariable>;
|
||||
onValueUpdate: (
|
||||
name: string,
|
||||
id: string,
|
||||
value: IDashboardVariable['selectedValue'],
|
||||
allSelected: boolean,
|
||||
) => void;
|
||||
variablesToGetUpdated: string[];
|
||||
setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
|
||||
dependencyData: IDependencyData | null;
|
||||
}
|
||||
|
||||
function QueryVariableInput({
|
||||
variableData,
|
||||
existingVariables,
|
||||
variablesToGetUpdated,
|
||||
setVariablesToGetUpdated,
|
||||
dependencyData,
|
||||
onValueUpdate,
|
||||
}: QueryVariableInputProps): JSX.Element {
|
||||
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
|
||||
[],
|
||||
);
|
||||
const [errorMessage, setErrorMessage] = useState<null | string>(null);
|
||||
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
|
||||
const {
|
||||
tempSelection,
|
||||
setTempSelection,
|
||||
value,
|
||||
defaultValue,
|
||||
enableSelectAll,
|
||||
onChange,
|
||||
onDropdownVisibleChange,
|
||||
handleClear,
|
||||
} = useDashboardVariableSelectHelper({
|
||||
variableData,
|
||||
optionsData,
|
||||
onValueUpdate,
|
||||
});
|
||||
|
||||
const validVariableUpdate = (): boolean => {
|
||||
if (!variableData.name) {
|
||||
return false;
|
||||
}
|
||||
return Boolean(
|
||||
variablesToGetUpdated.length &&
|
||||
variablesToGetUpdated[0] === variableData.name,
|
||||
);
|
||||
};
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
const getOptions = (variablesRes: VariableResponseProps | null): void => {
|
||||
try {
|
||||
setErrorMessage(null);
|
||||
|
||||
if (
|
||||
variablesRes?.variableValues &&
|
||||
Array.isArray(variablesRes?.variableValues)
|
||||
) {
|
||||
const newOptionsData = sortValues(
|
||||
variablesRes?.variableValues,
|
||||
variableData.sort,
|
||||
);
|
||||
|
||||
const oldOptionsData = sortValues(optionsData, variableData.sort) as never;
|
||||
|
||||
if (!areArraysEqual(newOptionsData, oldOptionsData)) {
|
||||
let valueNotInList = false;
|
||||
|
||||
if (isArray(variableData.selectedValue)) {
|
||||
variableData.selectedValue.forEach((val) => {
|
||||
if (!newOptionsData.includes(val)) {
|
||||
valueNotInList = true;
|
||||
}
|
||||
});
|
||||
} else if (
|
||||
isString(variableData.selectedValue) &&
|
||||
!newOptionsData.includes(variableData.selectedValue)
|
||||
) {
|
||||
valueNotInList = true;
|
||||
}
|
||||
|
||||
// variablesData.allSelected is added for the case where on change of options we need to update the
|
||||
// local storage
|
||||
if (
|
||||
variableData.name &&
|
||||
(validVariableUpdate() || valueNotInList || variableData.allSelected)
|
||||
) {
|
||||
if (
|
||||
variableData.allSelected &&
|
||||
variableData.multiSelect &&
|
||||
variableData.showALLOption
|
||||
) {
|
||||
onValueUpdate(variableData.name, variableData.id, newOptionsData, true);
|
||||
|
||||
// Update tempSelection to maintain ALL state when dropdown is open
|
||||
if (tempSelection !== undefined) {
|
||||
setTempSelection(newOptionsData.map((option) => option.toString()));
|
||||
}
|
||||
} else {
|
||||
const value = variableData.selectedValue;
|
||||
let allSelected = false;
|
||||
|
||||
if (variableData.multiSelect) {
|
||||
const { selectedValue } = variableData;
|
||||
allSelected =
|
||||
newOptionsData.length > 0 &&
|
||||
Array.isArray(selectedValue) &&
|
||||
newOptionsData.every((option) => selectedValue.includes(option));
|
||||
}
|
||||
|
||||
if (variableData.name && variableData.id) {
|
||||
onValueUpdate(variableData.name, variableData.id, value, allSelected);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
setOptionsData(newOptionsData);
|
||||
} else {
|
||||
setVariablesToGetUpdated((prev) =>
|
||||
prev.filter((name) => name !== variableData.name),
|
||||
);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
}
|
||||
};
|
||||
|
||||
const { isLoading, refetch } = useQuery(
|
||||
[
|
||||
REACT_QUERY_KEY.DASHBOARD_BY_ID,
|
||||
variableData.name || '',
|
||||
`${minTime}`,
|
||||
`${maxTime}`,
|
||||
JSON.stringify(dependencyData?.order),
|
||||
],
|
||||
{
|
||||
enabled:
|
||||
variableData &&
|
||||
variableData.type === 'QUERY' &&
|
||||
checkAPIInvocation(
|
||||
variablesToGetUpdated,
|
||||
variableData,
|
||||
dependencyData?.parentDependencyGraph,
|
||||
),
|
||||
queryFn: () =>
|
||||
dashboardVariablesQuery({
|
||||
query: variableData.queryValue || '',
|
||||
variables: variablePropsToPayloadVariables(existingVariables),
|
||||
}),
|
||||
refetchOnWindowFocus: false,
|
||||
onSuccess: (response) => {
|
||||
getOptions(response.payload);
|
||||
setVariablesToGetUpdated((prev) =>
|
||||
prev.filter((v) => v !== variableData.name),
|
||||
);
|
||||
},
|
||||
onError: (error: {
|
||||
details: {
|
||||
error: string;
|
||||
};
|
||||
}) => {
|
||||
const { details } = error;
|
||||
|
||||
if (details.error) {
|
||||
let message = details.error;
|
||||
if ((details.error ?? '').toString().includes('Syntax error:')) {
|
||||
message =
|
||||
'Please make sure query is valid and dependent variables are selected';
|
||||
}
|
||||
setErrorMessage(message);
|
||||
}
|
||||
setVariablesToGetUpdated((prev) =>
|
||||
prev.filter((v) => v !== variableData.name),
|
||||
);
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
const handleRetry = useCallback((): void => {
|
||||
setErrorMessage(null);
|
||||
refetch();
|
||||
}, [refetch]);
|
||||
|
||||
return (
|
||||
<SelectVariableInput
|
||||
variableId={variableData.id}
|
||||
options={optionsData}
|
||||
value={value}
|
||||
onChange={onChange}
|
||||
onDropdownVisibleChange={onDropdownVisibleChange}
|
||||
onClear={handleClear}
|
||||
enableSelectAll={enableSelectAll}
|
||||
defaultValue={defaultValue}
|
||||
isMultiSelect={variableData.multiSelect}
|
||||
// query variable specific, API related props
|
||||
loading={isLoading}
|
||||
errorMessage={errorMessage}
|
||||
onRetry={handleRetry}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
export default memo(QueryVariableInput);
|
||||
@@ -0,0 +1,134 @@
|
||||
import { memo, useMemo } from 'react';
|
||||
import { orange } from '@ant-design/colors';
|
||||
import { WarningOutlined } from '@ant-design/icons';
|
||||
import { Popover, Tooltip, Typography } from 'antd';
|
||||
import { CustomMultiSelect, CustomSelect } from 'components/NewSelect';
|
||||
import { popupContainer } from 'utils/selectPopupContainer';
|
||||
|
||||
import { ALL_SELECT_VALUE } from '../utils';
|
||||
import { SelectItemStyle } from './styles';
|
||||
|
||||
const errorIconStyle = { margin: '0 0.5rem' };
|
||||
|
||||
interface SelectVariableInputProps {
|
||||
variableId: string;
|
||||
options: (string | number | boolean)[];
|
||||
value: string | string[] | undefined;
|
||||
enableSelectAll: boolean;
|
||||
isMultiSelect: boolean;
|
||||
onChange: (value: string | string[]) => void;
|
||||
onClear: () => void;
|
||||
defaultValue?: string | string[];
|
||||
onDropdownVisibleChange?: (visible: boolean) => void;
|
||||
loading?: boolean;
|
||||
errorMessage?: string | null;
|
||||
onRetry?: () => void;
|
||||
}
|
||||
|
||||
const MAX_TAG_DISPLAY_VALUES = 10;
|
||||
|
||||
function maxTagPlaceholder(
|
||||
omittedValues: { label?: React.ReactNode; value?: string | number }[],
|
||||
): JSX.Element {
|
||||
const valuesToShow = omittedValues.slice(0, MAX_TAG_DISPLAY_VALUES);
|
||||
const hasMore = omittedValues.length > MAX_TAG_DISPLAY_VALUES;
|
||||
const tooltipText =
|
||||
valuesToShow.map(({ value: v }) => v ?? '').join(', ') +
|
||||
(hasMore ? ` + ${omittedValues.length - MAX_TAG_DISPLAY_VALUES} more` : '');
|
||||
|
||||
return (
|
||||
<Tooltip title={tooltipText}>
|
||||
<span>+ {omittedValues.length} </span>
|
||||
</Tooltip>
|
||||
);
|
||||
}
|
||||
|
||||
function SelectVariableInput({
|
||||
variableId,
|
||||
options,
|
||||
value,
|
||||
onChange,
|
||||
onDropdownVisibleChange,
|
||||
onClear,
|
||||
loading,
|
||||
errorMessage,
|
||||
onRetry,
|
||||
enableSelectAll,
|
||||
isMultiSelect,
|
||||
defaultValue,
|
||||
}: SelectVariableInputProps): JSX.Element {
|
||||
const selectOptions = useMemo(
|
||||
() =>
|
||||
options.map((option) => ({
|
||||
label: option.toString(),
|
||||
value: option.toString(),
|
||||
})),
|
||||
[options],
|
||||
);
|
||||
|
||||
const commonProps = useMemo(
|
||||
() => ({
|
||||
// main props
|
||||
key: variableId,
|
||||
value,
|
||||
defaultValue,
|
||||
|
||||
// setup props
|
||||
placeholder: 'Select value',
|
||||
className: 'variable-select',
|
||||
popupClassName: 'dropdown-styles',
|
||||
getPopupContainer: popupContainer,
|
||||
style: SelectItemStyle,
|
||||
showSearch: true,
|
||||
bordered: false,
|
||||
|
||||
// dynamic props
|
||||
'data-testid': 'variable-select',
|
||||
onChange,
|
||||
loading,
|
||||
options: selectOptions,
|
||||
errorMessage,
|
||||
onRetry,
|
||||
}),
|
||||
[
|
||||
variableId,
|
||||
defaultValue,
|
||||
onChange,
|
||||
loading,
|
||||
selectOptions,
|
||||
value,
|
||||
errorMessage,
|
||||
onRetry,
|
||||
],
|
||||
);
|
||||
|
||||
return (
|
||||
<>
|
||||
{isMultiSelect ? (
|
||||
<CustomMultiSelect
|
||||
{...commonProps}
|
||||
placement="bottomLeft"
|
||||
maxTagCount={2}
|
||||
onDropdownVisibleChange={onDropdownVisibleChange}
|
||||
maxTagPlaceholder={maxTagPlaceholder}
|
||||
onClear={onClear}
|
||||
enableAllSelection={enableSelectAll}
|
||||
maxTagTextLength={30}
|
||||
allowClear={value !== ALL_SELECT_VALUE && value !== 'ALL'}
|
||||
/>
|
||||
) : (
|
||||
<CustomSelect {...commonProps} />
|
||||
)}
|
||||
|
||||
{errorMessage && (
|
||||
<span style={errorIconStyle}>
|
||||
<Popover placement="top" content={<Typography>{errorMessage}</Typography>}>
|
||||
<WarningOutlined style={{ color: orange[5] }} />
|
||||
</Popover>
|
||||
</span>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
export default memo(SelectVariableInput);
|
||||
@@ -1,16 +1,34 @@
import { memo, useCallback, useRef, useState } from 'react';
import { Input, InputRef } from 'antd';
import { IDashboardVariable } from 'types/api/dashboard/getAll';

interface TextboxVariableInputProps {
	variableData: IDashboardVariable;
	handleChange: (inputValue: string) => void;
}
import { VariableItemProps } from './VariableItem';

type TextboxVariableInputProps = Pick<
	VariableItemProps,
	'variableData' | 'onValueUpdate'
>;

function TextboxVariableInput({
	variableData,
	handleChange,
	onValueUpdate,
}: TextboxVariableInputProps): JSX.Element {
	const handleChange = useCallback(
		(inputValue: string | string[]): void => {
			if (inputValue === variableData.selectedValue) {
				return;
			}
			if (variableData.name) {
				onValueUpdate(variableData.name, variableData.id, inputValue, false);
			}
		},
		[
			onValueUpdate,
			variableData.id,
			variableData.name,
			variableData.selectedValue,
		],
	);

	const textboxInputRef = useRef<InputRef>(null);
	const [textboxInputValue, setTextboxInputValue] = useState<string>(
		(variableData.selectedValue?.toString() ||

@@ -50,6 +68,7 @@ function TextboxVariableInput({

	return (
		<Input
			key={variableData.id}
			ref={textboxInputRef}
			placeholder="Enter value"
			data-testid={`variable-textbox-${variableData.id}`}
@@ -1,36 +1,16 @@
|
||||
/* eslint-disable sonarjs/cognitive-complexity */
|
||||
/* eslint-disable jsx-a11y/click-events-have-key-events */
|
||||
/* eslint-disable jsx-a11y/no-static-element-interactions */
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
/* eslint-disable react/jsx-props-no-spreading */
|
||||
/* eslint-disable no-nested-ternary */
|
||||
import { memo, useCallback, useEffect, useMemo, useState } from 'react';
|
||||
import { useQuery } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { orange } from '@ant-design/colors';
|
||||
import { InfoCircleOutlined, WarningOutlined } from '@ant-design/icons';
|
||||
import { Popover, Tooltip, Typography } from 'antd';
|
||||
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
|
||||
import { CustomMultiSelect, CustomSelect } from 'components/NewSelect';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
|
||||
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
|
||||
import { isArray, isEmpty, isString } from 'lodash-es';
|
||||
import { memo } from 'react';
|
||||
import { InfoCircleOutlined } from '@ant-design/icons';
|
||||
import { Tooltip, Typography } from 'antd';
|
||||
import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
import { VariableResponseProps } from 'types/api/dashboard/variables/query';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
import { popupContainer } from 'utils/selectPopupContainer';
|
||||
|
||||
import { ALL_SELECT_VALUE, variablePropsToPayloadVariables } from '../utils';
|
||||
import { SelectItemStyle } from './styles';
|
||||
import CustomVariableInput from './CustomVariableInput';
|
||||
import QueryVariableInput from './QueryVariableInput';
|
||||
import TextboxVariableInput from './TextboxVariableInput';
|
||||
import { areArraysEqual, checkAPIInvocation } from './util';
|
||||
|
||||
import './DashboardVariableSelection.styles.scss';
|
||||
|
||||
interface VariableItemProps {
|
||||
export interface VariableItemProps {
|
||||
variableData: IDashboardVariable;
|
||||
existingVariables: Record<string, IDashboardVariable>;
|
||||
onValueUpdate: (
|
||||
@@ -44,445 +24,49 @@ interface VariableItemProps {
|
||||
dependencyData: IDependencyData | null;
|
||||
}
|
||||
|
||||
export const getSelectValue = (
|
||||
selectedValue: IDashboardVariable['selectedValue'],
|
||||
variableData: IDashboardVariable,
|
||||
): string | string[] | undefined => {
|
||||
if (Array.isArray(selectedValue)) {
|
||||
if (!variableData.multiSelect && selectedValue.length === 1) {
|
||||
return selectedValue[0]?.toString();
|
||||
}
|
||||
return selectedValue.map((item) => item.toString());
|
||||
}
|
||||
return selectedValue?.toString();
|
||||
};
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
function VariableItem({
|
||||
variableData,
|
||||
existingVariables,
|
||||
onValueUpdate,
|
||||
existingVariables,
|
||||
variablesToGetUpdated,
|
||||
setVariablesToGetUpdated,
|
||||
dependencyData,
|
||||
}: VariableItemProps): JSX.Element {
|
||||
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
|
||||
[],
|
||||
);
|
||||
const [tempSelection, setTempSelection] = useState<
|
||||
string | string[] | undefined
|
||||
>(undefined);
|
||||
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
|
||||
const validVariableUpdate = (): boolean => {
|
||||
if (!variableData.name) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// variableData.name is present as the top element or next in the queue - variablesToGetUpdated
|
||||
return Boolean(
|
||||
variablesToGetUpdated.length &&
|
||||
variablesToGetUpdated[0] === variableData.name,
|
||||
);
|
||||
};
|
||||
|
||||
const [errorMessage, setErrorMessage] = useState<null | string>(null);
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
const getOptions = (variablesRes: VariableResponseProps | null): void => {
|
||||
if (variablesRes && variableData.type === 'QUERY') {
|
||||
try {
|
||||
setErrorMessage(null);
|
||||
|
||||
if (
|
||||
variablesRes?.variableValues &&
|
||||
Array.isArray(variablesRes?.variableValues)
|
||||
) {
|
||||
const newOptionsData = sortValues(
|
||||
variablesRes?.variableValues,
|
||||
variableData.sort,
|
||||
);
|
||||
|
||||
const oldOptionsData = sortValues(optionsData, variableData.sort) as never;
|
||||
|
||||
if (!areArraysEqual(newOptionsData, oldOptionsData)) {
|
||||
/* eslint-disable no-useless-escape */
|
||||
|
||||
let valueNotInList = false;
|
||||
|
||||
if (isArray(variableData.selectedValue)) {
|
||||
variableData.selectedValue.forEach((val) => {
|
||||
const isUsed = newOptionsData.includes(val);
|
||||
|
||||
if (!isUsed) {
|
||||
valueNotInList = true;
|
||||
}
|
||||
});
|
||||
} else if (isString(variableData.selectedValue)) {
|
||||
const isUsed = newOptionsData.includes(variableData.selectedValue);
|
||||
|
||||
if (!isUsed) {
|
||||
valueNotInList = true;
|
||||
}
|
||||
}
|
||||
|
||||
// variablesData.allSelected is added for the case where on change of options we need to update the
|
||||
// local storage
|
||||
if (
|
||||
variableData.type === 'QUERY' &&
|
||||
variableData.name &&
|
||||
(validVariableUpdate() || valueNotInList || variableData.allSelected)
|
||||
) {
|
||||
if (
|
||||
variableData.allSelected &&
|
||||
variableData.multiSelect &&
|
||||
variableData.showALLOption
|
||||
) {
|
||||
onValueUpdate(variableData.name, variableData.id, newOptionsData, true);
|
||||
|
||||
// Update tempSelection to maintain ALL state when dropdown is open
|
||||
if (tempSelection !== undefined) {
|
||||
setTempSelection(newOptionsData.map((option) => option.toString()));
|
||||
}
|
||||
} else {
|
||||
const value = variableData.selectedValue;
|
||||
let allSelected = false;
|
||||
|
||||
if (variableData.multiSelect) {
|
||||
const { selectedValue } = variableData;
|
||||
allSelected =
|
||||
newOptionsData.length > 0 &&
|
||||
Array.isArray(selectedValue) &&
|
||||
newOptionsData.every((option) => selectedValue.includes(option));
|
||||
}
|
||||
|
||||
if (variableData && variableData?.name && variableData?.id) {
|
||||
onValueUpdate(variableData.name, variableData.id, value, allSelected);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
setOptionsData(newOptionsData);
|
||||
} else {
|
||||
setVariablesToGetUpdated((prev) =>
|
||||
prev.filter((name) => name !== variableData.name),
|
||||
);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
}
|
||||
} else if (variableData.type === 'CUSTOM') {
|
||||
const optionsData = sortValues(
|
||||
commaValuesParser(variableData.customValue || ''),
|
||||
variableData.sort,
|
||||
) as never;
|
||||
|
||||
setOptionsData(optionsData);
|
||||
}
|
||||
};
|
||||
|
||||
const { isLoading, refetch } = useQuery(
|
||||
[
|
||||
REACT_QUERY_KEY.DASHBOARD_BY_ID,
|
||||
variableData.name || '',
|
||||
`${minTime}`,
|
||||
`${maxTime}`,
|
||||
JSON.stringify(dependencyData?.order),
|
||||
],
|
||||
{
|
||||
enabled:
|
||||
variableData &&
|
||||
variableData.type === 'QUERY' &&
|
||||
checkAPIInvocation(
|
||||
variablesToGetUpdated,
|
||||
variableData,
|
||||
dependencyData?.parentDependencyGraph,
|
||||
),
|
||||
queryFn: () =>
|
||||
dashboardVariablesQuery({
|
||||
query: variableData.queryValue || '',
|
||||
variables: variablePropsToPayloadVariables(existingVariables),
|
||||
}),
|
||||
refetchOnWindowFocus: false,
|
||||
onSuccess: (response) => {
|
||||
getOptions(response.payload);
|
||||
setVariablesToGetUpdated((prev) =>
|
||||
prev.filter((v) => v !== variableData.name),
|
||||
);
|
||||
},
|
||||
onError: (error: {
|
||||
details: {
|
||||
error: string;
|
||||
};
|
||||
}) => {
|
||||
const { details } = error;
|
||||
|
||||
if (details.error) {
|
||||
let message = details.error;
|
||||
if ((details.error ?? '').toString().includes('Syntax error:')) {
|
||||
message =
|
||||
'Please make sure query is valid and dependent variables are selected';
|
||||
}
|
||||
setErrorMessage(message);
|
||||
}
|
||||
setVariablesToGetUpdated((prev) =>
|
||||
prev.filter((v) => v !== variableData.name),
|
||||
);
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
const handleChange = useCallback(
|
||||
(inputValue: string | string[]): void => {
|
||||
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
|
||||
|
||||
if (
|
||||
value === variableData.selectedValue ||
|
||||
(Array.isArray(value) &&
|
||||
Array.isArray(variableData.selectedValue) &&
|
||||
areArraysEqual(value, variableData.selectedValue))
|
||||
) {
|
||||
return;
|
||||
}
|
||||
if (variableData.name) {
|
||||
// Check if ALL is effectively selected by comparing with available options
|
||||
const isAllSelected =
|
||||
Array.isArray(value) &&
|
||||
value.length > 0 &&
|
||||
optionsData.every((option) => value.includes(option.toString()));
|
||||
|
||||
if (isAllSelected && variableData.showALLOption) {
|
||||
// For ALL selection, pass null to avoid storing values
|
||||
onValueUpdate(variableData.name, variableData.id, optionsData, true);
|
||||
} else {
|
||||
onValueUpdate(variableData.name, variableData.id, value, false);
|
||||
}
|
||||
}
|
||||
},
|
||||
[
|
||||
variableData.multiSelect,
|
||||
variableData.selectedValue,
|
||||
variableData.name,
|
||||
variableData.id,
|
||||
onValueUpdate,
|
||||
optionsData,
|
||||
variableData.showALLOption,
|
||||
],
|
||||
);
|
||||
|
||||
// Add a handler for tracking temporary selection changes
|
||||
const handleTempChange = (inputValue: string | string[]): void => {
|
||||
// Store the selection in temporary state while dropdown is open
|
||||
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
|
||||
setTempSelection(value);
|
||||
};
|
||||
|
||||
// Handle dropdown visibility changes
|
||||
const handleDropdownVisibleChange = (visible: boolean): void => {
|
||||
// Initialize temp selection when opening dropdown
|
||||
if (visible) {
|
||||
setTempSelection(getSelectValue(variableData.selectedValue, variableData));
|
||||
}
|
||||
// Apply changes when closing dropdown
|
||||
else if (!visible && tempSelection !== undefined) {
|
||||
// Call handleChange with the temporarily stored selection
|
||||
handleChange(tempSelection);
|
||||
setTempSelection(undefined);
|
||||
}
|
||||
};
|
||||
|
||||
// do not debounce the above function as we do not need debounce in select variables
|
||||
|
||||
const { selectedValue } = variableData;
|
||||
const selectedValueStringified = useMemo(
|
||||
() => getSelectValue(selectedValue, variableData),
|
||||
[selectedValue, variableData],
|
||||
);
|
||||
|
||||
const enableSelectAll = variableData.multiSelect && variableData.showALLOption;
|
||||
|
||||
const selectValue =
|
||||
variableData.allSelected && enableSelectAll
|
||||
? 'ALL'
|
||||
: selectedValueStringified;
|
||||
|
||||
// Apply default value on first render if no selection exists
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
const finalSelectedValues = useMemo(() => {
|
||||
if (variableData.multiSelect) {
|
||||
let value = tempSelection || selectedValue;
|
||||
if (isEmpty(value)) {
|
||||
if (variableData.showALLOption) {
|
||||
if (variableData.defaultValue) {
|
||||
value = variableData.defaultValue;
|
||||
} else {
|
||||
value = optionsData;
|
||||
}
|
||||
} else if (variableData.defaultValue) {
|
||||
value = variableData.defaultValue;
|
||||
} else {
|
||||
value = optionsData?.[0];
|
||||
}
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
if (isEmpty(selectedValue)) {
|
||||
if (variableData.defaultValue) {
|
||||
return variableData.defaultValue;
|
||||
}
|
||||
return optionsData[0]?.toString();
|
||||
}
|
||||
|
||||
return selectedValue;
|
||||
}, [
|
||||
variableData.multiSelect,
|
||||
variableData.showALLOption,
|
||||
variableData.defaultValue,
|
||||
selectedValue,
|
||||
tempSelection,
|
||||
optionsData,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
if (
|
||||
(variableData.multiSelect && !(tempSelection || selectValue)) ||
|
||||
isEmpty(selectValue)
|
||||
) {
|
||||
handleChange(finalSelectedValues as string[] | string);
|
||||
}
|
||||
}, [
|
||||
finalSelectedValues,
|
||||
handleChange,
|
||||
selectValue,
|
||||
tempSelection,
|
||||
variableData.multiSelect,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
// Fetch options for CUSTOM Type
|
||||
if (variableData.type === 'CUSTOM') {
|
||||
getOptions(null);
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [variableData.type, variableData.customValue]);
|
||||
const { name, description, type: variableType } = variableData;
|
||||
|
||||
return (
|
||||
<div className="variable-item">
|
||||
<Typography.Text className="variable-name" ellipsis>
|
||||
${variableData.name}
|
||||
{variableData.description && (
|
||||
<Tooltip title={variableData.description}>
|
||||
${name}
|
||||
{description && (
|
||||
<Tooltip title={description}>
|
||||
<InfoCircleOutlined className="info-icon" />
|
||||
</Tooltip>
|
||||
)}
|
||||
</Typography.Text>
|
||||
|
||||
<div className="variable-value">
|
||||
{variableData.type === 'TEXTBOX' ? (
|
||||
{variableType === 'TEXTBOX' && (
|
||||
<TextboxVariableInput
|
||||
variableData={variableData}
|
||||
handleChange={handleChange}
|
||||
onValueUpdate={onValueUpdate}
|
||||
/>
|
||||
) : (
|
||||
optionsData &&
|
||||
(variableData.multiSelect ? (
|
||||
<CustomMultiSelect
|
||||
key={
|
||||
selectValue && Array.isArray(selectValue)
|
||||
? selectValue.join(' ')
|
||||
: selectValue || variableData.id
|
||||
}
|
||||
options={optionsData.map((option) => ({
|
||||
label: option.toString(),
|
||||
value: option.toString(),
|
||||
}))}
|
||||
defaultValue={variableData.defaultValue || selectValue}
|
||||
onChange={handleTempChange}
|
||||
bordered={false}
|
||||
placeholder="Select value"
|
||||
placement="bottomLeft"
|
||||
style={SelectItemStyle}
|
||||
loading={isLoading}
|
||||
showSearch
|
||||
data-testid="variable-select"
|
||||
className="variable-select"
|
||||
popupClassName="dropdown-styles"
|
||||
maxTagCount={2}
|
||||
getPopupContainer={popupContainer}
|
||||
value={tempSelection || selectValue}
|
||||
onDropdownVisibleChange={handleDropdownVisibleChange}
|
||||
errorMessage={errorMessage}
|
||||
// eslint-disable-next-line react/no-unstable-nested-components
|
||||
maxTagPlaceholder={(omittedValues): JSX.Element => {
|
||||
const maxDisplayValues = 10;
|
||||
const valuesToShow = omittedValues.slice(0, maxDisplayValues);
|
||||
const hasMore = omittedValues.length > maxDisplayValues;
|
||||
const tooltipText =
|
||||
valuesToShow.map(({ value }) => value).join(', ') +
|
||||
(hasMore ? ` + ${omittedValues.length - maxDisplayValues} more` : '');
|
||||
|
||||
return (
|
||||
<Tooltip title={tooltipText}>
|
||||
<span>+ {omittedValues.length} </span>
|
||||
</Tooltip>
|
||||
);
|
||||
}}
|
||||
onClear={(): void => {
|
||||
handleChange([]);
|
||||
}}
|
||||
enableAllSelection={enableSelectAll}
|
||||
maxTagTextLength={30}
|
||||
allowClear={selectValue !== ALL_SELECT_VALUE && selectValue !== 'ALL'}
|
||||
onRetry={(): void => {
|
||||
setErrorMessage(null);
|
||||
refetch();
|
||||
}}
|
||||
/>
|
||||
) : (
|
||||
<CustomSelect
|
||||
key={
|
||||
selectValue && Array.isArray(selectValue)
|
||||
? selectValue.join(' ')
|
||||
: selectValue || variableData.id
|
||||
}
|
||||
defaultValue={variableData.defaultValue || selectValue}
|
||||
onChange={handleChange}
|
||||
bordered={false}
|
||||
placeholder="Select value"
|
||||
style={SelectItemStyle}
|
||||
loading={isLoading}
|
||||
showSearch
|
||||
data-testid="variable-select"
|
||||
className="variable-select"
|
||||
popupClassName="dropdown-styles"
|
||||
getPopupContainer={popupContainer}
|
||||
options={optionsData.map((option) => ({
|
||||
label: option.toString(),
|
||||
value: option.toString(),
|
||||
}))}
|
||||
value={selectValue}
|
||||
errorMessage={errorMessage}
|
||||
onRetry={(): void => {
|
||||
setErrorMessage(null);
|
||||
refetch();
|
||||
}}
|
||||
/>
|
||||
))
|
||||
)}
|
||||
{variableData.type !== 'TEXTBOX' && errorMessage && (
|
||||
<span style={{ margin: '0 0.5rem' }}>
|
||||
<Popover
|
||||
placement="top"
|
||||
content={<Typography>{errorMessage}</Typography>}
|
||||
>
|
||||
<WarningOutlined style={{ color: orange[5] }} />
|
||||
</Popover>
|
||||
</span>
|
||||
{variableType === 'CUSTOM' && (
|
||||
<CustomVariableInput
|
||||
variableData={variableData}
|
||||
onValueUpdate={onValueUpdate}
|
||||
/>
|
||||
)}
|
||||
{variableType === 'QUERY' && (
|
||||
<QueryVariableInput
|
||||
variableData={variableData}
|
||||
onValueUpdate={onValueUpdate}
|
||||
existingVariables={existingVariables}
|
||||
variablesToGetUpdated={variablesToGetUpdated}
|
||||
setVariablesToGetUpdated={setVariablesToGetUpdated}
|
||||
dependencyData={dependencyData}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -0,0 +1,201 @@
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react';
|
||||
import { isEmpty } from 'lodash-es';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
|
||||
import { areArraysEqual, getSelectValue } from './util';
|
||||
|
||||
interface UseDashboardVariableSelectHelperParams {
|
||||
variableData: IDashboardVariable;
|
||||
optionsData: (string | number | boolean)[];
|
||||
onValueUpdate: (
|
||||
name: string,
|
||||
id: string,
|
||||
value: IDashboardVariable['selectedValue'],
|
||||
allSelected: boolean,
|
||||
) => void;
|
||||
}
|
||||
|
||||
interface UseDashboardVariableSelectHelperReturn {
|
||||
// State
|
||||
tempSelection: string | string[] | undefined;
|
||||
setTempSelection: React.Dispatch<
|
||||
React.SetStateAction<string | string[] | undefined>
|
||||
>;
|
||||
value: string | string[] | undefined;
|
||||
defaultValue: string | string[] | undefined;
|
||||
|
||||
// Derived values
|
||||
enableSelectAll: boolean;
|
||||
|
||||
// Handlers
|
||||
onChange: (value: string | string[]) => void;
|
||||
onDropdownVisibleChange: (visible: boolean) => void;
|
||||
handleClear: () => void;
|
||||
}
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
export function useDashboardVariableSelectHelper({
|
||||
variableData,
|
||||
optionsData,
|
||||
onValueUpdate,
|
||||
}: UseDashboardVariableSelectHelperParams): UseDashboardVariableSelectHelperReturn {
|
||||
const { selectedValue } = variableData;
|
||||
|
||||
const [tempSelection, setTempSelection] = useState<
|
||||
string | string[] | undefined
|
||||
>(undefined);
|
||||
|
||||
const selectedValueStringified = useMemo(
|
||||
() => getSelectValue(selectedValue, variableData),
|
||||
[selectedValue, variableData],
|
||||
);
|
||||
|
||||
const enableSelectAll = variableData.multiSelect && variableData.showALLOption;
|
||||
|
||||
const selectValue =
|
||||
variableData.allSelected && enableSelectAll
|
||||
? 'ALL'
|
||||
: selectedValueStringified;
|
||||
|
||||
const handleChange = useCallback(
|
||||
(inputValue: string | string[]): void => {
|
||||
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
|
||||
|
||||
if (
|
||||
value === variableData.selectedValue ||
|
||||
(Array.isArray(value) &&
|
||||
Array.isArray(variableData.selectedValue) &&
|
||||
areArraysEqual(value, variableData.selectedValue))
|
||||
) {
|
||||
return;
|
||||
}
|
||||
if (variableData.name) {
|
||||
// Check if ALL is effectively selected by comparing with available options
|
||||
const isAllSelected =
|
||||
Array.isArray(value) &&
|
||||
value.length > 0 &&
|
||||
optionsData.every((option) => value.includes(option.toString()));
|
||||
|
||||
if (isAllSelected && variableData.showALLOption) {
|
||||
// For ALL selection, pass optionsData as the value and set allSelected to true
|
||||
onValueUpdate(variableData.name, variableData.id, optionsData, true);
|
||||
} else {
|
||||
onValueUpdate(variableData.name, variableData.id, value, false);
|
||||
}
|
||||
}
|
||||
},
|
||||
[
|
||||
variableData.multiSelect,
|
||||
variableData.selectedValue,
|
||||
variableData.name,
|
||||
variableData.id,
|
||||
variableData.showALLOption,
|
||||
onValueUpdate,
|
||||
optionsData,
|
||||
],
|
||||
);
|
||||
|
||||
const handleTempChange = useCallback(
|
||||
(inputValue: string | string[]): void => {
|
||||
// Store the selection in temporary state while dropdown is open
|
||||
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
|
||||
setTempSelection(value);
|
||||
},
|
||||
[variableData.multiSelect],
|
||||
);
|
||||
|
||||
// Apply default value on first render if no selection exists
|
||||
const finalSelectedValues = useMemo(() => {
|
||||
if (variableData.multiSelect) {
|
||||
let value = tempSelection || selectedValue;
|
||||
if (isEmpty(value)) {
|
||||
if (variableData.showALLOption) {
|
||||
if (variableData.defaultValue) {
|
||||
value = variableData.defaultValue;
|
||||
} else {
|
||||
value = optionsData;
|
||||
}
|
||||
} else if (variableData.defaultValue) {
|
||||
value = variableData.defaultValue;
|
||||
} else {
|
||||
value = optionsData?.[0];
|
||||
}
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
if (isEmpty(selectedValue)) {
|
||||
if (variableData.defaultValue) {
|
||||
return variableData.defaultValue;
|
||||
}
|
||||
return optionsData[0]?.toString();
|
||||
}
|
||||
|
||||
return selectedValue;
|
||||
}, [
|
||||
variableData.multiSelect,
|
||||
variableData.showALLOption,
|
||||
variableData.defaultValue,
|
||||
selectedValue,
|
||||
tempSelection,
|
||||
optionsData,
|
||||
]);
|
||||
|
||||
// Apply default values when needed
|
||||
useEffect(() => {
|
||||
if (
|
||||
(variableData.multiSelect && !(tempSelection || selectValue)) ||
|
||||
isEmpty(selectValue)
|
||||
) {
|
||||
handleChange(finalSelectedValues as string[] | string);
|
||||
}
|
||||
}, [
|
||||
finalSelectedValues,
|
||||
handleChange,
|
||||
selectValue,
|
||||
tempSelection,
|
||||
variableData.multiSelect,
|
||||
]);
|
||||
|
||||
// Handle dropdown visibility changes
|
||||
const onDropdownVisibleChange = useCallback(
|
||||
(visible: boolean): void => {
|
||||
// Initialize temp selection when opening dropdown
|
||||
if (visible) {
|
||||
setTempSelection(getSelectValue(variableData.selectedValue, variableData));
|
||||
}
|
||||
// Apply changes when closing dropdown
|
||||
else if (!visible && tempSelection !== undefined) {
|
||||
// Call handleChange with the temporarily stored selection
|
||||
handleChange(tempSelection);
|
||||
setTempSelection(undefined);
|
||||
}
|
||||
},
|
||||
[variableData, tempSelection, handleChange],
|
||||
);
|
||||
|
||||
const handleClear = useCallback((): void => {
|
||||
handleChange([]);
|
||||
}, [handleChange]);
|
||||
|
||||
const value = variableData.multiSelect
|
||||
? tempSelection || selectValue
|
||||
: selectValue;
|
||||
|
||||
const defaultValue = variableData.defaultValue || selectValue;
|
||||
|
||||
const onChange = useMemo(() => {
|
||||
return variableData.multiSelect ? handleTempChange : handleChange;
|
||||
}, [variableData.multiSelect, handleTempChange, handleChange]);
|
||||
|
||||
return {
|
||||
tempSelection,
|
||||
setTempSelection,
|
||||
enableSelectAll,
|
||||
onDropdownVisibleChange,
|
||||
handleClear,
|
||||
value,
|
||||
defaultValue,
|
||||
onChange,
|
||||
};
|
||||
}
|
||||
@@ -381,3 +381,16 @@ export const uniqueValues = (values: string[] | string): string[] | string => {

	return values;
};

export const getSelectValue = (
	selectedValue: IDashboardVariable['selectedValue'],
	variableData: IDashboardVariable,
): string | string[] | undefined => {
	if (Array.isArray(selectedValue)) {
		if (!variableData.multiSelect && selectedValue.length === 1) {
			return selectedValue[0]?.toString();
		}
		return selectedValue.map((item) => item.toString());
	}
	return selectedValue?.toString();
};
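getSelectValue now lives in util.ts (it was previously exported from VariableItem). It normalizes a variable's stored selection into what the select inputs expect: a single string for single-select variables and a string array otherwise. A quick illustration with made-up inputs:

```typescript
// Illustrative calls; getSelectValue is the helper shown above.
// The casts only stand in for the rest of the IDashboardVariable shape.
const single = getSelectValue([42], { multiSelect: false } as never);
// single === '42'

const multi = getSelectValue(['a', 'b'], { multiSelect: true } as never);
// multi deep-equals ['a', 'b']
```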

@@ -1,127 +0,0 @@
package fields

import (
    "bytes"
    "io"
    "net/http"

    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/http/render"
    "github.com/SigNoz/signoz/pkg/telemetrylogs"
    "github.com/SigNoz/signoz/pkg/telemetrymetadata"
    "github.com/SigNoz/signoz/pkg/telemetrymeter"
    "github.com/SigNoz/signoz/pkg/telemetrymetrics"
    "github.com/SigNoz/signoz/pkg/telemetrystore"
    "github.com/SigNoz/signoz/pkg/telemetrytraces"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

type API struct {
    telemetryStore         telemetrystore.TelemetryStore
    telemetryMetadataStore telemetrytypes.MetadataStore
}

// TODO: move this to module and remove metastore init
func NewAPI(
    settings factory.ProviderSettings,
    telemetryStore telemetrystore.TelemetryStore,
) *API {
    telemetryMetadataStore := telemetrymetadata.NewTelemetryMetaStore(
        settings,
        telemetryStore,
        telemetrytraces.DBName,
        telemetrytraces.TagAttributesV2TableName,
        telemetrytraces.SpanAttributesKeysTblName,
        telemetrytraces.SpanIndexV3TableName,
        telemetrymetrics.DBName,
        telemetrymetrics.AttributesMetadataTableName,
        telemetrymeter.DBName,
        telemetrymeter.SamplesAgg1dTableName,
        telemetrylogs.DBName,
        telemetrylogs.LogsV2TableName,
        telemetrylogs.TagAttributesV2TableName,
        telemetrylogs.LogAttributeKeysTblName,
        telemetrylogs.LogResourceKeysTblName,
        telemetrymetadata.DBName,
        telemetrymetadata.AttributesMetadataLocalTableName,
    )

    return &API{
        telemetryStore:         telemetryStore,
        telemetryMetadataStore: telemetryMetadataStore,
    }
}

func (api *API) GetFieldsKeys(w http.ResponseWriter, r *http.Request) {

    type fieldKeysResponse struct {
        Keys     map[string][]*telemetrytypes.TelemetryFieldKey `json:"keys"`
        Complete bool                                           `json:"complete"`
    }

    bodyBytes, _ := io.ReadAll(r.Body)
    r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
    ctx := r.Context()

    fieldKeySelector, err := parseFieldKeyRequest(r)
    if err != nil {
        render.Error(w, err)
        return
    }

    keys, complete, err := api.telemetryMetadataStore.GetKeys(ctx, fieldKeySelector)
    if err != nil {
        render.Error(w, err)
        return
    }

    response := fieldKeysResponse{
        Keys:     keys,
        Complete: complete,
    }

    render.Success(w, http.StatusOK, response)
}

func (api *API) GetFieldsValues(w http.ResponseWriter, r *http.Request) {

    type fieldValuesResponse struct {
        Values   *telemetrytypes.TelemetryFieldValues `json:"values"`
        Complete bool                                 `json:"complete"`
    }

    bodyBytes, _ := io.ReadAll(r.Body)
    r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
    ctx := r.Context()

    fieldValueSelector, err := parseFieldValueRequest(r)
    if err != nil {
        render.Error(w, err)
        return
    }

    allValues, allComplete, err := api.telemetryMetadataStore.GetAllValues(ctx, fieldValueSelector)
    if err != nil {
        render.Error(w, err)
        return
    }

    relatedValues, relatedComplete, err := api.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector)
    if err != nil {
        // we don't want to return error if we fail to get related values for some reason
        relatedValues = []string{}
    }

    values := &telemetrytypes.TelemetryFieldValues{
        StringValues:  allValues.StringValues,
        NumberValues:  allValues.NumberValues,
        RelatedValues: relatedValues,
    }

    response := fieldValuesResponse{
        Values:   values,
        Complete: allComplete && relatedComplete,
    }

    render.Success(w, http.StatusOK, response)
}
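
Note: both the old handlers above and their module-based replacements later in this diff serve the same two GET endpoints, so they can be exercised with a plain HTTP client. Below is a minimal, hypothetical Go sketch for /api/v1/fields/keys; the base URL, the SIGNOZ-API-KEY header name, and the status/data response envelope are assumptions for illustration and are not defined in this diff.

// fieldskeys_client.go - hypothetical client sketch, not part of this change.
package main

import (
    "encoding/json"
    "fmt"
    "net/http"
    "net/url"
)

func main() {
    // Query parameters mirror the ones parsed by parseFieldKeyRequest below.
    q := url.Values{}
    q.Set("signal", "logs")
    q.Set("searchText", "service")
    q.Set("limit", "100")

    req, err := http.NewRequest(http.MethodGet, "http://localhost:8080/api/v1/fields/keys?"+q.Encode(), nil)
    if err != nil {
        panic(err)
    }
    req.Header.Set("SIGNOZ-API-KEY", "<token>") // assumed auth header name

    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    // Decode loosely; the concrete schema lives in telemetrytypes.GettableFieldKeys.
    var body map[string]any
    if err := json.NewDecoder(resp.Body).Decode(&body); err != nil {
        panic(err)
    }
    fmt.Println(resp.StatusCode, body["status"], body["data"])
}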

@@ -1,162 +0,0 @@
package fields

import (
    "net/http"
    "strconv"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
    "github.com/SigNoz/signoz/pkg/valuer"
)

func parseFieldKeyRequest(r *http.Request) (*telemetrytypes.FieldKeySelector, error) {
    var req telemetrytypes.FieldKeySelector
    var signal telemetrytypes.Signal
    var source telemetrytypes.Source
    var err error

    signalStr := r.URL.Query().Get("signal")
    if signalStr != "" {
        signal = telemetrytypes.Signal{String: valuer.NewString(signalStr)}
    } else {
        signal = telemetrytypes.SignalUnspecified
    }

    sourceStr := r.URL.Query().Get("source")
    if sourceStr != "" {
        source = telemetrytypes.Source{String: valuer.NewString(sourceStr)}
    } else {
        source = telemetrytypes.SourceUnspecified
    }

    if r.URL.Query().Get("limit") != "" {
        limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
        if err != nil {
            return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse limit")
        }
        req.Limit = limit
    } else {
        req.Limit = 1000
    }

    var startUnixMilli, endUnixMilli int64

    if r.URL.Query().Get("startUnixMilli") != "" {
        startUnixMilli, err = strconv.ParseInt(r.URL.Query().Get("startUnixMilli"), 10, 64)
        if err != nil {
            return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse startUnixMilli")
        }
        // Round down to the nearest 6 hours (21600000 milliseconds)
        startUnixMilli -= startUnixMilli % 21600000
    }
    if r.URL.Query().Get("endUnixMilli") != "" {
        endUnixMilli, err = strconv.ParseInt(r.URL.Query().Get("endUnixMilli"), 10, 64)
        if err != nil {
            return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse endUnixMilli")
        }
    }

    // Parse fieldContext directly instead of using JSON unmarshalling.
    var fieldContext telemetrytypes.FieldContext
    fieldContextStr := r.URL.Query().Get("fieldContext")
    if fieldContextStr != "" {
        fieldContext = telemetrytypes.FieldContext{String: valuer.NewString(fieldContextStr)}
    }

    // Parse fieldDataType directly instead of using JSON unmarshalling.
    var fieldDataType telemetrytypes.FieldDataType
    fieldDataTypeStr := r.URL.Query().Get("fieldDataType")
    if fieldDataTypeStr != "" {
        fieldDataType = telemetrytypes.FieldDataType{String: valuer.NewString(fieldDataTypeStr)}
    }

    metricName := r.URL.Query().Get("metricName")
    var metricContext *telemetrytypes.MetricContext
    if metricName != "" {
        metricContext = &telemetrytypes.MetricContext{
            MetricName: metricName,
        }
    }

    name := r.URL.Query().Get("searchText")

    if name != "" && fieldContext == telemetrytypes.FieldContextUnspecified {
        parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
        if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
            // Only apply inferred context if it is valid for the current signal
            if isContextValidForSignal(parsedFieldKey.FieldContext, signal) {
                name = parsedFieldKey.Name
                fieldContext = parsedFieldKey.FieldContext
            }
        }
    }

    req = telemetrytypes.FieldKeySelector{
        StartUnixMilli:    startUnixMilli,
        EndUnixMilli:      endUnixMilli,
        Signal:            signal,
        Source:            source,
        Name:              name,
        FieldContext:      fieldContext,
        FieldDataType:     fieldDataType,
        Limit:             req.Limit,
        SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeFuzzy,
        MetricContext:     metricContext,
    }
    return &req, nil
}

func parseFieldValueRequest(r *http.Request) (*telemetrytypes.FieldValueSelector, error) {
    keySelector, err := parseFieldKeyRequest(r)
    if err != nil {
        return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse field key request")
    }

    name := r.URL.Query().Get("name")
    if name != "" && keySelector.FieldContext == telemetrytypes.FieldContextUnspecified {
        parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
        if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
            // Only apply inferred context if it is valid for the current signal
            if isContextValidForSignal(parsedFieldKey.FieldContext, keySelector.Signal) {
                name = parsedFieldKey.Name
                keySelector.FieldContext = parsedFieldKey.FieldContext
            }
        }
    }
    keySelector.Name = name
    existingQuery := r.URL.Query().Get("existingQuery")
    value := r.URL.Query().Get("searchText")

    // Parse limit for fieldValue request, fallback to default 50 if parsing fails.
    limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
    if err != nil {
        limit = 50
    }

    req := telemetrytypes.FieldValueSelector{
        FieldKeySelector: keySelector,
        ExistingQuery:    existingQuery,
        Value:            value,
        Limit:            limit,
    }

    return &req, nil
}

func isContextValidForSignal(ctx telemetrytypes.FieldContext, signal telemetrytypes.Signal) bool {
    if ctx == telemetrytypes.FieldContextResource ||
        ctx == telemetrytypes.FieldContextAttribute ||
        ctx == telemetrytypes.FieldContextScope {
        return true
    }

    switch signal.StringValue() {
    case telemetrytypes.SignalLogs.StringValue():
        return ctx == telemetrytypes.FieldContextLog || ctx == telemetrytypes.FieldContextBody
    case telemetrytypes.SignalTraces.StringValue():
        return ctx == telemetrytypes.FieldContextSpan || ctx == telemetrytypes.FieldContextEvent || ctx == telemetrytypes.FieldContextTrace
    case telemetrytypes.SignalMetrics.StringValue():
        return ctx == telemetrytypes.FieldContextMetric
    }
    return true
}
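
Note: the startUnixMilli handling in parseFieldKeyRequest above snaps the start of the lookup window down to a 6-hour boundary (21,600,000 ms). A tiny standalone sketch of that arithmetic, with an illustrative timestamp:

// rounding_sketch.go - standalone illustration of the 6-hour bucketing above.
package main

import (
    "fmt"
    "time"
)

func main() {
    const sixHoursMs = int64(6 * time.Hour / time.Millisecond) // 21600000

    // Example input: 2024-01-02 09:30:00 UTC as unix milliseconds.
    startUnixMilli := time.Date(2024, 1, 2, 9, 30, 0, 0, time.UTC).UnixMilli()

    // Same operation as in parseFieldKeyRequest.
    startUnixMilli -= startUnixMilli % sixHoursMs

    fmt.Println(time.UnixMilli(startUnixMilli).UTC()) // 2024-01-02 06:00:00 +0000 UTC
}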

pkg/apiserver/signozapiserver/fields.go  (new file, 50 lines)
@@ -0,0 +1,50 @@
package signozapiserver

import (
    "net/http"

    "github.com/SigNoz/signoz/pkg/http/handler"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
    "github.com/gorilla/mux"
)

func (provider *provider) addFieldsRoutes(router *mux.Router) error {
    if err := router.Handle("/api/v1/fields/keys", handler.New(provider.authZ.ViewAccess(provider.fieldsHandler.GetFieldsKeys), handler.OpenAPIDef{
        ID:                  "GetFieldsKeys",
        Tags:                []string{"fields"},
        Summary:             "Get field keys",
        Description:         "This endpoint returns field keys",
        Request:             nil,
        RequestQuery:        new(telemetrytypes.PostableFieldKeysParams),
        RequestContentType:  "",
        Response:            new(telemetrytypes.GettableFieldKeys),
        ResponseContentType: "application/json",
        SuccessStatusCode:   http.StatusOK,
        ErrorStatusCodes:    []int{},
        Deprecated:          false,
        SecuritySchemes:     newSecuritySchemes(types.RoleViewer),
    })).Methods(http.MethodGet).GetError(); err != nil {
        return err
    }

    if err := router.Handle("/api/v1/fields/values", handler.New(provider.authZ.ViewAccess(provider.fieldsHandler.GetFieldsValues), handler.OpenAPIDef{
        ID:                  "GetFieldsValues",
        Tags:                []string{"fields"},
        Summary:             "Get field values",
        Description:         "This endpoint returns field values",
        Request:             nil,
        RequestQuery:        new(telemetrytypes.PostableFieldValueParams),
        RequestContentType:  "",
        Response:            new(telemetrytypes.GettableFieldValues),
        ResponseContentType: "application/json",
        SuccessStatusCode:   http.StatusOK,
        ErrorStatusCodes:    []int{},
        Deprecated:          false,
        SecuritySchemes:     newSecuritySchemes(types.RoleViewer),
    })).Methods(http.MethodGet).GetError(); err != nil {
        return err
    }

    return nil
}

@@ -13,6 +13,7 @@ import (
    "github.com/SigNoz/signoz/pkg/http/middleware"
    "github.com/SigNoz/signoz/pkg/modules/authdomain"
    "github.com/SigNoz/signoz/pkg/modules/dashboard"
    "github.com/SigNoz/signoz/pkg/modules/fields"
    "github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
    "github.com/SigNoz/signoz/pkg/modules/organization"
    "github.com/SigNoz/signoz/pkg/modules/preference"

@@ -44,6 +45,7 @@ type provider struct {
    gatewayHandler gateway.Handler
    roleGetter     role.Getter
    roleHandler    role.Handler
    fieldsHandler  fields.Handler
}

func NewFactory(

@@ -63,9 +65,31 @@ func NewFactory(
    gatewayHandler gateway.Handler,
    roleGetter role.Getter,
    roleHandler role.Handler,
    fieldsHandler fields.Handler,
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
    return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
        return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler, globalHandler, promoteHandler, flaggerHandler, dashboardModule, dashboardHandler, metricsExplorerHandler, gatewayHandler, roleGetter, roleHandler)
        return newProvider(
            ctx,
            providerSettings,
            config,
            orgGetter,
            authz,
            orgHandler,
            userHandler,
            sessionHandler,
            authDomainHandler,
            preferenceHandler,
            globalHandler,
            promoteHandler,
            flaggerHandler,
            dashboardModule,
            dashboardHandler,
            metricsExplorerHandler,
            gatewayHandler,
            roleGetter,
            roleHandler,
            fieldsHandler,
        )
    })
}

@@ -89,6 +113,7 @@ func newProvider(
    gatewayHandler gateway.Handler,
    roleGetter role.Getter,
    roleHandler role.Handler,
    fieldsHandler fields.Handler,
) (apiserver.APIServer, error) {
    settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
    router := mux.NewRouter().UseEncodedPath()

@@ -111,6 +136,7 @@ func newProvider(
        gatewayHandler: gatewayHandler,
        roleGetter:     roleGetter,
        roleHandler:    roleHandler,
        fieldsHandler:  fieldsHandler,
    }

    provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz, roleGetter)

@@ -175,6 +201,10 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
        return err
    }

    if err := provider.addFieldsRoutes(router); err != nil {
        return err
    }

    return nil
}

@@ -325,7 +325,7 @@ func (provider *provider) addUserRoutes(router *mux.Router) error {
        Response:            nil,
        ResponseContentType: "",
        SuccessStatusCode:   http.StatusNoContent,
        ErrorStatusCodes:    []int{http.StatusBadRequest},
        ErrorStatusCodes:    []int{http.StatusBadRequest, http.StatusUnprocessableEntity},
        Deprecated:          false,
        SecuritySchemes:     []handler.OpenAPISecurityScheme{},
    })).Methods(http.MethodPost).GetError(); err != nil {

pkg/modules/fields/handler.go  (new file, 11 lines)
@@ -0,0 +1,11 @@
package fields

import "net/http"

type Handler interface {
    // Gets the fields keys for the given field key selector
    GetFieldsKeys(http.ResponseWriter, *http.Request)

    // Gets the fields values for the given field value selector
    GetFieldsValues(http.ResponseWriter, *http.Request)
}
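
Note: consumers now depend on this small fields.Handler interface instead of the concrete *fields.API, so route wiring can be exercised with a stub. A hypothetical no-op implementation (not part of this diff; package name and response bodies invented for illustration) could look like:

// noop_handler.go - hypothetical stub satisfying fields.Handler for tests.
package fieldstest

import (
    "net/http"

    "github.com/SigNoz/signoz/pkg/modules/fields"
)

type noopHandler struct{}

// NewNoopHandler returns a fields.Handler that always replies with empty,
// complete results; useful when only the routing layer is under test.
func NewNoopHandler() fields.Handler {
    return noopHandler{}
}

func (noopHandler) GetFieldsKeys(rw http.ResponseWriter, _ *http.Request) {
    rw.Header().Set("Content-Type", "application/json")
    rw.WriteHeader(http.StatusOK)
    _, _ = rw.Write([]byte(`{"keys":{},"complete":true}`))
}

func (noopHandler) GetFieldsValues(rw http.ResponseWriter, _ *http.Request) {
    rw.Header().Set("Content-Type", "application/json")
    rw.WriteHeader(http.StatusOK)
    _, _ = rw.Write([]byte(`{"values":{},"complete":true}`))
}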

pkg/modules/fields/implfields/handler.go  (new file, 79 lines)
@@ -0,0 +1,79 @@
package implfields

import (
    "net/http"

    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/http/binding"
    "github.com/SigNoz/signoz/pkg/http/render"
    "github.com/SigNoz/signoz/pkg/modules/fields"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

type handler struct {
    telemetryMetadataStore telemetrytypes.MetadataStore
}

func NewHandler(settings factory.ProviderSettings, telemetryMetadataStore telemetrytypes.MetadataStore) fields.Handler {
    return &handler{
        telemetryMetadataStore: telemetryMetadataStore,
    }
}

func (handler *handler) GetFieldsKeys(rw http.ResponseWriter, req *http.Request) {
    ctx := req.Context()

    var params telemetrytypes.PostableFieldKeysParams
    if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
        render.Error(rw, err)
        return
    }

    fieldKeySelector := telemetrytypes.NewFieldKeySelectorFromPostableFieldKeysParams(params)

    keys, complete, err := handler.telemetryMetadataStore.GetKeys(ctx, fieldKeySelector)
    if err != nil {
        render.Error(rw, err)
        return
    }

    render.Success(rw, http.StatusOK, &telemetrytypes.GettableFieldKeys{
        Keys:     keys,
        Complete: complete,
    })
}

func (handler *handler) GetFieldsValues(rw http.ResponseWriter, req *http.Request) {
    ctx := req.Context()

    var params telemetrytypes.PostableFieldValueParams
    if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
        render.Error(rw, err)
        return
    }

    fieldValueSelector := telemetrytypes.NewFieldValueSelectorFromPostableFieldValueParams(params)

    allValues, allComplete, err := handler.telemetryMetadataStore.GetAllValues(ctx, fieldValueSelector)
    if err != nil {
        render.Error(rw, err)
        return
    }

    relatedValues, relatedComplete, err := handler.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector)
    if err != nil {
        // we don't want to return error if we fail to get related values for some reason
        relatedValues = []string{}
    }

    values := &telemetrytypes.TelemetryFieldValues{
        StringValues:  allValues.StringValues,
        NumberValues:  allValues.NumberValues,
        RelatedValues: relatedValues,
    }

    render.Success(rw, http.StatusOK, &telemetrytypes.GettableFieldValues{
        Values:   values,
        Complete: allComplete && relatedComplete,
    })
}

@@ -369,7 +369,7 @@ func (module *Module) GetOrCreateResetPasswordToken(ctx context.Context, userID

func (module *Module) ForgotPassword(ctx context.Context, orgID valuer.UUID, email valuer.Email, frontendBaseURL string) error {
    if !module.config.Password.Reset.AllowSelf {
        return errors.New(errors.TypeUnsupported, errors.CodeUnsupported, "users are not allowed to reset their password themselves, please contact an admin to reset your password")
        return errors.New(errors.TypeUnsupported, errors.CodeUnsupported, "Users are not allowed to reset their password themselves, please contact an admin to reset your password.")
    }

    user, err := module.store.GetUserByEmailAndOrgID(ctx, email, orgID)

@@ -25,7 +25,6 @@ import (
    "time"

    "github.com/SigNoz/signoz/pkg/alertmanager"
    "github.com/SigNoz/signoz/pkg/apis/fields"
    errorsV2 "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/http/middleware"
    "github.com/SigNoz/signoz/pkg/http/render"

@@ -145,8 +144,6 @@ type APIHandler struct {

    LicensingAPI licensing.API

    FieldsAPI *fields.API

    QuerierAPI *querierAPI.API

    QueryParserAPI *queryparser.API

@@ -177,8 +174,6 @@ type APIHandlerOpts struct {

    LicensingAPI licensing.API

    FieldsAPI *fields.API

    QuerierAPI *querierAPI.API

    QueryParserAPI *queryparser.API

@@ -243,7 +238,6 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
        AlertmanagerAPI: opts.AlertmanagerAPI,
        LicensingAPI:    opts.LicensingAPI,
        Signoz:          opts.Signoz,
        FieldsAPI:       opts.FieldsAPI,
        QuerierAPI:      opts.QuerierAPI,
        QueryParserAPI:  opts.QueryParserAPI,
    }

@@ -399,13 +393,6 @@ func (aH *APIHandler) RegisterQueryRangeV3Routes(router *mux.Router, am *middlew
    subRouter.HandleFunc("/logs/livetail", am.ViewAccess(aH.QuerierAPI.QueryRawStream)).Methods(http.MethodGet)
}

func (aH *APIHandler) RegisterFieldsRoutes(router *mux.Router, am *middleware.AuthZ) {
    subRouter := router.PathPrefix("/api/v1").Subrouter()

    subRouter.HandleFunc("/fields/keys", am.ViewAccess(aH.FieldsAPI.GetFieldsKeys)).Methods(http.MethodGet)
    subRouter.HandleFunc("/fields/values", am.ViewAccess(aH.FieldsAPI.GetFieldsValues)).Methods(http.MethodGet)
}

func (aH *APIHandler) RegisterInfraMetricsRoutes(router *mux.Router, am *middleware.AuthZ) {
    hostsSubRouter := router.PathPrefix("/api/v1/hosts").Subrouter()
    hostsSubRouter.HandleFunc("/attribute_keys", am.ViewAccess(aH.getHostAttributeKeys)).Methods(http.MethodGet)

@@ -17,7 +17,6 @@ import (
    "github.com/gorilla/handlers"

    "github.com/SigNoz/signoz/pkg/alertmanager"
    "github.com/SigNoz/signoz/pkg/apis/fields"
    "github.com/SigNoz/signoz/pkg/cache"
    "github.com/SigNoz/signoz/pkg/http/middleware"
    "github.com/SigNoz/signoz/pkg/licensing/nooplicensing"

@@ -133,7 +132,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
        FluxInterval:    config.Querier.FluxInterval,
        AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
        LicensingAPI:    nooplicensing.NewLicenseAPI(),
        FieldsAPI:       fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
        Signoz:          signoz,
        QuerierAPI:      querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
        QueryParserAPI:  queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),

@@ -215,7 +213,6 @@ func (s *Server) createPublicServer(api *APIHandler, web web.Web) (*http.Server,
    api.RegisterLogsRoutes(r, am)
    api.RegisterIntegrationRoutes(r, am)
    api.RegisterCloudIntegrationsRoutes(r, am)
    api.RegisterFieldsRoutes(r, am)
    api.RegisterQueryRangeV3Routes(r, am)
    api.RegisterInfraMetricsRoutes(r, am)
    api.RegisterWebSocketPaths(r, am)

@@ -71,7 +71,18 @@ func Parse(filters *v3.FilterSet) (string, error) {
        // accustom log filters like `body.log.message EXISTS` into EXPR language
        // where User is attempting to check for keys present in JSON log body
        if strings.HasPrefix(v.Key.Key, "body.") {
            filter = fmt.Sprintf("%s %s %s", exprFormattedValue(strings.TrimPrefix(v.Key.Key, "body.")), logOperatorsToExpr[v.Operator], "fromJSON(body)")
            // if body is a string and is a valid JSON, then check if the key exists in the JSON
            filter = fmt.Sprintf(`((type(body) == "string" && isJSON(body)) && %s %s %s)`, exprFormattedValue(strings.TrimPrefix(v.Key.Key, "body.")), logOperatorsToExpr[v.Operator], "fromJSON(body)")

            // if body is a map, then check if the key exists in the map
            operator := v3.FilterOperatorNotEqual
            if v.Operator == v3.FilterOperatorNotExists {
                operator = v3.FilterOperatorEqual
            }
            nilCheckFilter := fmt.Sprintf("%s %s nil", v.Key.Key, logOperatorsToExpr[operator])

            // join the two filters with OR
            filter = fmt.Sprintf(`(%s or (type(body) == "map" && (%s)))`, filter, nilCheckFilter)
        } else if typ := getTypeName(v.Key.Type); typ != "" {
            filter = fmt.Sprintf("%s %s %s", exprFormattedValue(v.Key.Key), logOperatorsToExpr[v.Operator], typ)
        } else {
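
Note: the strings built in Parse are ordinary github.com/expr-lang/expr programs evaluated against a log-entry environment. As a rough standalone illustration of the body-key handling above, the sketch below compiles a similar expression against a plain map environment; fromJSON is hand-rolled here purely for the example, whereas the real pipeline gets it (along with isJSON and type) from the signoz stanza helper environment.

// expr_sketch.go - rough illustration of evaluating an expr filter like the
// ones generated above; not the actual pipeline environment.
package main

import (
    "encoding/json"
    "fmt"

    "github.com/expr-lang/expr"
)

func main() {
    env := map[string]any{
        "body":       `{"log":{"message":"user login"}}`,
        "attributes": map[string]any{"level": "info"},
        // Stand-in for the helper provided by the signoz stanza expr env.
        "fromJSON": func(s string) map[string]any {
            out := map[string]any{}
            _ = json.Unmarshal([]byte(s), &out)
            return out
        },
    }

    // Similar in spirit to the `body.log EXISTS` branch: check a top-level key
    // inside the JSON body, combined with an attribute filter.
    code := `"log" in fromJSON(body) && attributes["level"] == "info"`

    program, err := expr.Compile(code, expr.Env(env))
    if err != nil {
        panic(err)
    }
    out, err := expr.Run(program, env)
    if err != nil {
        panic(err)
    }
    fmt.Println(out) // true
}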
@@ -3,203 +3,538 @@ package queryBuilderToExpr
|
||||
import (
|
||||
"testing"
|
||||
|
||||
signozstanzahelper "github.com/SigNoz/signoz-otel-collector/processor/signozlogspipelineprocessor/stanza/operator/helper"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
"github.com/expr-lang/expr/vm"
|
||||
"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/entry"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
var testCases = []struct {
|
||||
Name string
|
||||
Query *v3.FilterSet
|
||||
Expr string
|
||||
ExpectError bool
|
||||
}{
|
||||
{
|
||||
Name: "equal",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "="},
|
||||
}},
|
||||
Expr: `attributes["key"] == "checkbody"`,
|
||||
},
|
||||
{
|
||||
Name: "not equal",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "!="},
|
||||
}},
|
||||
Expr: `attributes["key"] != "checkbody"`,
|
||||
},
|
||||
{
|
||||
Name: "less than",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] < 10`,
|
||||
},
|
||||
{
|
||||
Name: "greater than",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] > 10`,
|
||||
},
|
||||
{
|
||||
Name: "less than equal",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] <= 10`,
|
||||
},
|
||||
{
|
||||
Name: "greater than equal",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">="},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] >= 10`,
|
||||
},
|
||||
// case sensitive
|
||||
{
|
||||
Name: "body contains",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
|
||||
}},
|
||||
Expr: `body != nil && lower(body) contains lower("checkbody")`,
|
||||
},
|
||||
{
|
||||
Name: "body.log.message exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
}},
|
||||
Expr: `"log.message" in fromJSON(body)`,
|
||||
},
|
||||
{
|
||||
Name: "body.log.message not exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
}},
|
||||
Expr: `"log.message" not in fromJSON(body)`,
|
||||
},
|
||||
{
|
||||
Name: "body ncontains",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "ncontains"},
|
||||
}},
|
||||
Expr: `body != nil && lower(body) not contains lower("checkbody")`,
|
||||
},
|
||||
{
|
||||
Name: "body regex",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "regex"},
|
||||
}},
|
||||
Expr: `body != nil && body matches "[0-1]+regex$"`,
|
||||
},
|
||||
{
|
||||
Name: "body not regex",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
|
||||
}},
|
||||
Expr: `body != nil && body not matches "[0-1]+regex$"`,
|
||||
},
|
||||
{
|
||||
Name: "regex with escape characters",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: `^Executing \[\S+@\S+:[0-9]+\] \S+".*`, Operator: "regex"},
|
||||
}},
|
||||
Expr: `body != nil && body matches "^Executing \\[\\S+@\\S+:[0-9]+\\] \\S+\".*"`,
|
||||
},
|
||||
{
|
||||
Name: "invalid regex",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
|
||||
}},
|
||||
Expr: `body != nil && lower(body) not matches "[0-9]++"`,
|
||||
ExpectError: true,
|
||||
},
|
||||
{
|
||||
Name: "in",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []interface{}{1, 2, 3, 4}, Operator: "in"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] in [1,2,3,4]`,
|
||||
},
|
||||
{
|
||||
Name: "not in",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []interface{}{"1", "2"}, Operator: "nin"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] not in ['1','2']`,
|
||||
},
|
||||
{
|
||||
Name: "exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "exists"},
|
||||
}},
|
||||
Expr: `"key" in attributes`,
|
||||
},
|
||||
{
|
||||
Name: "not exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
|
||||
}},
|
||||
Expr: `"key" not in attributes`,
|
||||
},
|
||||
{
|
||||
Name: "trace_id not exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
}},
|
||||
Expr: `trace_id == nil`,
|
||||
},
|
||||
{
|
||||
Name: "trace_id exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
}},
|
||||
Expr: `trace_id != nil`,
|
||||
},
|
||||
{
|
||||
Name: "span_id not exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
}},
|
||||
Expr: `span_id == nil`,
|
||||
},
|
||||
{
|
||||
Name: "span_id exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
}},
|
||||
Expr: `span_id != nil`,
|
||||
},
|
||||
{
|
||||
Name: "Multi filter",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] <= 10 and body != nil && body not matches "[0-1]+regex$" and "key" not in attributes`,
|
||||
},
|
||||
{
|
||||
Name: "incorrect multi filter",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] <= 10 and body not matches "[0-9]++" and "key" not in attributes`,
|
||||
ExpectError: true,
|
||||
},
|
||||
}
|
||||
func TestParseExpression(t *testing.T) {
|
||||
var testCases = []struct {
|
||||
Name string
|
||||
Query *v3.FilterSet
|
||||
Expr string
|
||||
ExpectError bool
|
||||
}{
|
||||
{
|
||||
Name: "equal",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "="},
|
||||
}},
|
||||
Expr: `attributes["key"] == "checkbody"`,
|
||||
},
|
||||
{
|
||||
Name: "not equal",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "!="},
|
||||
}},
|
||||
Expr: `attributes["key"] != "checkbody"`,
|
||||
},
|
||||
{
|
||||
Name: "less than",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] < 10`,
|
||||
},
|
||||
{
|
||||
Name: "greater than",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] > 10`,
|
||||
},
|
||||
{
|
||||
Name: "less than equal",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] <= 10`,
|
||||
},
|
||||
{
|
||||
Name: "greater than equal",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">="},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] >= 10`,
|
||||
},
|
||||
// case sensitive
|
||||
{
|
||||
Name: "body contains",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
|
||||
}},
|
||||
Expr: `body != nil && lower(body) contains lower("checkbody")`,
|
||||
},
|
||||
{
|
||||
Name: "body.log.message exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
}},
|
||||
Expr: `(((type(body) == "string" && isJSON(body)) && "log.message" in fromJSON(body)) or (type(body) == "map" && (body.log.message != nil)))`,
|
||||
},
|
||||
{
|
||||
Name: "body.log.message not exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
}},
|
||||
Expr: `(((type(body) == "string" && isJSON(body)) && "log.message" not in fromJSON(body)) or (type(body) == "map" && (body.log.message == nil)))`,
|
||||
},
|
||||
{
|
||||
Name: "body ncontains",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "ncontains"},
|
||||
}},
|
||||
Expr: `body != nil && lower(body) not contains lower("checkbody")`,
|
||||
},
|
||||
{
|
||||
Name: "body regex",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "regex"},
|
||||
}},
|
||||
Expr: `body != nil && body matches "[0-1]+regex$"`,
|
||||
},
|
||||
{
|
||||
Name: "body not regex",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
|
||||
}},
|
||||
Expr: `body != nil && body not matches "[0-1]+regex$"`,
|
||||
},
|
||||
{
|
||||
Name: "regex with escape characters",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: `^Executing \[\S+@\S+:[0-9]+\] \S+".*`, Operator: "regex"},
|
||||
}},
|
||||
Expr: `body != nil && body matches "^Executing \\[\\S+@\\S+:[0-9]+\\] \\S+\".*"`,
|
||||
},
|
||||
{
|
||||
Name: "invalid regex",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
|
||||
}},
|
||||
Expr: `body != nil && lower(body) not matches "[0-9]++"`,
|
||||
ExpectError: true,
|
||||
},
|
||||
{
|
||||
Name: "in",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{1, 2, 3, 4}, Operator: "in"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] in [1,2,3,4]`,
|
||||
},
|
||||
{
|
||||
Name: "not in",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{"1", "2"}, Operator: "nin"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] not in ['1','2']`,
|
||||
},
|
||||
{
|
||||
Name: "exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "exists"},
|
||||
}},
|
||||
Expr: `"key" in attributes`,
|
||||
},
|
||||
{
|
||||
Name: "not exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
|
||||
}},
|
||||
Expr: `"key" not in attributes`,
|
||||
},
|
||||
{
|
||||
Name: "trace_id not exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
}},
|
||||
Expr: `trace_id == nil`,
|
||||
},
|
||||
{
|
||||
Name: "trace_id exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
}},
|
||||
Expr: `trace_id != nil`,
|
||||
},
|
||||
{
|
||||
Name: "span_id not exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
}},
|
||||
Expr: `span_id == nil`,
|
||||
},
|
||||
{
|
||||
Name: "span_id exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
}},
|
||||
Expr: `span_id != nil`,
|
||||
},
|
||||
{
|
||||
Name: "Multi filter",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] <= 10 and body != nil && body not matches "[0-1]+regex$" and "key" not in attributes`,
|
||||
},
|
||||
{
|
||||
Name: "incorrect multi filter",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
|
||||
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
|
||||
}},
|
||||
Expr: `attributes["key"] != nil && attributes["key"] <= 10 and body not matches "[0-9]++" and "key" not in attributes`,
|
||||
ExpectError: true,
|
||||
},
|
||||
}
|
||||
|
||||
func TestParse(t *testing.T) {
|
||||
for _, tt := range testCases {
|
||||
Convey(tt.Name, t, func() {
|
||||
t.Run(tt.Name, func(t *testing.T) {
|
||||
x, err := Parse(tt.Query)
|
||||
if tt.ExpectError {
|
||||
So(err, ShouldNotBeNil)
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
So(err, ShouldBeNil)
|
||||
So(x, ShouldEqual, tt.Expr)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, tt.Expr, x)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
type EntryComposite struct {
|
||||
ID int
|
||||
*entry.Entry
|
||||
}
|
||||
|
||||
// makeEntry creates an EntryComposite for tests. Pass nil for traceID/spanID to mean "not set".
|
||||
func makeEntry(id int, body any, attributes, resource map[string]any, traceID, spanID []byte) EntryComposite {
|
||||
e := entry.New()
|
||||
e.Body = body
|
||||
if attributes != nil {
|
||||
e.Attributes = attributes
|
||||
} else {
|
||||
e.Attributes = make(map[string]any)
|
||||
}
|
||||
if resource != nil {
|
||||
e.Resource = resource
|
||||
} else {
|
||||
e.Resource = make(map[string]any)
|
||||
}
|
||||
if traceID != nil {
|
||||
e.TraceID = traceID
|
||||
}
|
||||
if spanID != nil {
|
||||
e.SpanID = spanID
|
||||
}
|
||||
return EntryComposite{ID: id, Entry: e}
|
||||
}
|
||||
|
||||
func TestExpressionVSEntry(t *testing.T) {
|
||||
// Dataset: entries with varied body (JSON and plain text), attributes, trace_id, span_id for filter testing.
|
||||
// IDs 0..12: JSON bodies (body.msg / body.log etc. work). IDs 13..17: simple text log bodies.
|
||||
dataset := []EntryComposite{
|
||||
// JSON body entries (0-12)
|
||||
makeEntry(0, `{"msg":"hello world"}`, map[string]any{"level": "info"}, map[string]any{"env": "prod", "host": "node-0"}, nil, nil),
|
||||
makeEntry(1, `{"msg":"error occurred", "missing": "value"}`, map[string]any{"level": "error"}, map[string]any{"env": "prod", "host": "node-1"}, []byte("trace1"), []byte("span1")),
|
||||
makeEntry(2, `{"msg":"checkbody substring"}`, map[string]any{"level": "info"}, map[string]any{"env": "staging", "host": "node-2"}, []byte("trace2"), nil),
|
||||
makeEntry(3, `{"msg":"no match here"}`, map[string]any{"level": "debug"}, map[string]any{"env": "staging", "host": "node-3"}, nil, []byte("span3")),
|
||||
makeEntry(4, `{"msg":"101regex suffix"}`, map[string]any{"code": "200", "count": int64(5)}, map[string]any{"env": "prod", "host": "node-4"}, nil, nil),
|
||||
makeEntry(5, `{"msg":"plain text only"}`, map[string]any{"code": "404", "count": int64(10)}, map[string]any{"env": "prod", "host": "node-5"}, []byte("trace5"), []byte("span5")),
|
||||
makeEntry(6, `{"log":{"message":"user login"}}`, map[string]any{"service": "auth"}, map[string]any{"env": "dev", "host": "node-6"}, nil, nil),
|
||||
makeEntry(7, `{"log":{"message":"user logout"}}`, map[string]any{"service": "auth", "user_id": "u1"}, map[string]any{"env": "dev", "host": "node-7"}, []byte("trace7"), nil),
|
||||
makeEntry(8, `{"event":"click"}`, map[string]any{"service": "api"}, map[string]any{"env": "dev", "host": "node-8"}, nil, nil),
|
||||
makeEntry(9, `{"msg":"checkbody"}`, map[string]any{"tag": "exact", "num": int64(9)}, map[string]any{"env": "prod", "host": "node-9"}, nil, nil),
|
||||
makeEntry(10, `{"msg":"CHECKBODY case"}`, map[string]any{"tag": "case", "num": int64(10)}, map[string]any{"env": "staging", "host": "node-10"}, nil, nil),
|
||||
makeEntry(11, `{"msg":"foo"}`, map[string]any{"status": "active", "score": int64(100)}, map[string]any{"env": "prod", "host": "node-11"}, nil, nil),
|
||||
makeEntry(12, `{"msg":"bar"}`, map[string]any{"status": "inactive", "score": int64(50)}, map[string]any{"env": "staging", "host": "node-12"}, []byte("trace12"), []byte("span12")),
|
||||
// Plain text log body entries (13-17)
|
||||
makeEntry(13, "Server started on port 8080", map[string]any{"component": "server"}, map[string]any{"env": "prod", "host": "node-13"}, nil, nil),
|
||||
makeEntry(14, "Connection refused to 10.0.0.1:5432", map[string]any{"level": "error"}, map[string]any{"env": "prod", "host": "node-14"}, nil, nil),
|
||||
makeEntry(15, "User login failed for admin", map[string]any{"service": "auth", "level": "warn"}, map[string]any{"env": "dev", "host": "node-15"}, []byte("trace15"), nil),
|
||||
makeEntry(16, "checkbody in text log", map[string]any{"level": "info"}, map[string]any{"env": "staging", "host": "node-16"}, nil, nil),
|
||||
makeEntry(17, "WARN: disk full on /var", map[string]any{"level": "warn"}, map[string]any{"env": "prod", "host": "node-17"}, nil, []byte("span17")),
|
||||
// Body as map (not string) entries (18-20)
|
||||
makeEntry(18, map[string]any{"msg": "checkbody substring", "level": "info"}, map[string]any{"source": "map"}, map[string]any{"env": "prod", "host": "node-18"}, nil, nil),
|
||||
makeEntry(19, map[string]any{"log": map[string]any{"message": "nested value in map body"}, "missing": true}, map[string]any{"source": "map"}, map[string]any{"env": "staging", "host": "node-19"}, []byte("trace19"), nil),
|
||||
makeEntry(20, map[string]any{"event": "deploy", "version": "1.2.0"}, map[string]any{"source": "map", "level": "info"}, map[string]any{"env": "dev", "host": "node-20"}, nil, []byte("span20")),
|
||||
}
|
||||
|
||||
var testCases = []struct {
|
||||
Name string
|
||||
Query *v3.FilterSet
|
||||
ExpectedMatches []int
|
||||
}{
|
||||
{
|
||||
Name: "resource equal (env)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
|
||||
}},
|
||||
ExpectedMatches: []int{0, 1, 4, 5, 9, 11, 13, 14, 17, 18},
|
||||
},
|
||||
{
|
||||
Name: "resource not equal (env)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "!="},
|
||||
}},
|
||||
ExpectedMatches: []int{2, 3, 6, 7, 8, 10, 12, 15, 16, 19, 20},
|
||||
},
|
||||
{
|
||||
Name: "attribute less than (numeric)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "count", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 8, Operator: "<"},
|
||||
}},
|
||||
ExpectedMatches: []int{4},
|
||||
},
|
||||
{
|
||||
Name: "attribute greater than (numeric)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "count", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 8, Operator: ">"},
|
||||
}},
|
||||
ExpectedMatches: []int{5},
|
||||
},
|
||||
{
|
||||
Name: "body contains (case insensitive)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
|
||||
}},
|
||||
ExpectedMatches: []int{2, 9, 10, 16},
|
||||
},
|
||||
{
|
||||
Name: "body ncontains",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "ncontains"},
|
||||
}},
|
||||
ExpectedMatches: []int{0, 1, 3, 4, 5, 6, 7, 8, 11, 12, 13, 14, 15, 17},
|
||||
},
|
||||
{
|
||||
Name: "body.msg (case insensitive)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body.msg", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: false}, Value: "checkbody", Operator: "contains"},
|
||||
}},
|
||||
ExpectedMatches: []int{2, 9, 10, 18},
|
||||
},
|
||||
{
|
||||
Name: "body regex",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex", Operator: "regex"},
|
||||
}},
|
||||
ExpectedMatches: []int{4},
|
||||
},
|
||||
{
|
||||
Name: "body not regex",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex", Operator: "nregex"},
|
||||
}},
|
||||
ExpectedMatches: []int{0, 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17},
|
||||
},
|
||||
// body.log.message exists/nexists: expr checks "log.message" in fromJSON(body); nested key
|
||||
// semantics depend on signoz stanza helper. Omitted here to avoid coupling to env shape.
|
||||
{
|
||||
Name: "body top-level key exists (body.msg)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body.msg", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
}},
|
||||
ExpectedMatches: []int{0, 1, 2, 3, 4, 5, 9, 10, 11, 12, 18},
|
||||
},
|
||||
{
|
||||
Name: "body top-level key not exists (body.missing)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body.missing", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
}},
|
||||
ExpectedMatches: []int{0, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 18, 20},
|
||||
},
|
||||
{
|
||||
Name: "attribute exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "exists"},
|
||||
}},
|
||||
ExpectedMatches: []int{6, 7, 8, 15},
|
||||
},
|
||||
{
|
||||
Name: "attribute not exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
|
||||
}},
|
||||
ExpectedMatches: []int{0, 1, 2, 3, 4, 5, 9, 10, 11, 12, 13, 14, 16, 17, 18, 19, 20},
|
||||
},
|
||||
{
|
||||
Name: "trace_id exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
}},
|
||||
ExpectedMatches: []int{1, 2, 5, 7, 12, 15, 19},
|
||||
},
|
||||
{
|
||||
Name: "trace_id not exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
}},
|
||||
ExpectedMatches: []int{0, 3, 4, 6, 8, 9, 10, 11, 13, 14, 16, 17, 18, 20},
|
||||
},
|
||||
{
|
||||
Name: "span_id exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
}},
|
||||
ExpectedMatches: []int{1, 3, 5, 12, 17, 20},
|
||||
},
|
||||
{
|
||||
Name: "span_id not exists",
|
||||
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
			}},
			ExpectedMatches: []int{0, 2, 4, 6, 7, 8, 9, 10, 11, 13, 14, 15, 16, 18, 19},
		},
		{
			Name: "in (attribute in list)",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{"info", "error"}, Operator: "in"},
			}},
			ExpectedMatches: []int{0, 1, 2, 14, 16, 20},
		},
		{
			Name: "not in (attribute not in list)",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{"error", "warn"}, Operator: "nin"},
			}},
			ExpectedMatches: []int{0, 2, 3, 16, 20},
		},
		{
			Name: "multi filter AND",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "staging", Operator: "="},
				{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "info", Operator: "="},
			}},
			ExpectedMatches: []int{2, 16},
		},
		{
			Name: "multi filter AND (two attributes)",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "auth", Operator: "="},
				{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
			}},
			ExpectedMatches: []int{6, 7},
		},
		// Multi-filter variations: body + attribute, three conditions, trace/span + attribute
		{
			Name: "multi filter AND body contains + attribute",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "Connection", Operator: "contains"},
				{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
			}},
			ExpectedMatches: []int{14},
		},
		{
			Name: "multi filter AND body contains + service",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "login", Operator: "contains"},
				{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "auth", Operator: "="},
			}},
			ExpectedMatches: []int{6, 15},
		},
		{
			Name: "multi filter AND env + level (prod error)",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
				{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "error", Operator: "="},
			}},
			ExpectedMatches: []int{1, 14},
		},
		{
			Name: "multi filter AND three conditions (staging + checkbody + info)",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "staging", Operator: "="},
				{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
				{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "info", Operator: "="},
			}},
			ExpectedMatches: []int{2, 16},
		},
		{
			Name: "multi filter AND trace_id exists + body contains",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
				{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
			}},
			ExpectedMatches: []int{2},
		},
		{
			Name: "multi filter AND span_id nexists + service auth",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
				{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "auth", Operator: "="},
			}},
			ExpectedMatches: []int{6, 7, 15},
		},
		{
			Name: "multi filter AND body regex + attribute",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex", Operator: "regex"},
				{Key: v3.AttributeKey{Key: "code", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "200", Operator: "="},
			}},
			ExpectedMatches: []int{4},
		},
		{
			Name: "multi filter AND no trace_id + no span_id + env prod",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
				{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
				{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
			}},
			ExpectedMatches: []int{0, 4, 9, 11, 13, 14, 18},
		},
		{
			Name: "multi filter AND level warn + body contains",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "warn", Operator: "="},
				{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "disk", Operator: "contains"},
			}},
			ExpectedMatches: []int{17},
		},
		{
			Name: "no matches (attribute value not present)",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "never", Operator: "="},
			}},
			ExpectedMatches: []int{},
		},
		{
			Name: "attribute equal and trace_id exists",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "code", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "404", Operator: "="},
				{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
			}},
			ExpectedMatches: []int{5},
		},
	}

	for _, tt := range testCases {
		t.Run(tt.Name, func(t *testing.T) {
			expression, err := Parse(tt.Query)
			assert.NoError(t, err)

			compiled, hasBodyFieldRef, err := signozstanzahelper.ExprCompileBool(expression)
			assert.NoError(t, err)

			matchedIDs := []int{}
			for _, d := range dataset {
				env := signozstanzahelper.GetExprEnv(d.Entry, hasBodyFieldRef)
				matches, err := vm.Run(compiled, env)
				signozstanzahelper.PutExprEnv(env)
				if err != nil {
					// Eval error (e.g. fromJSON on non-JSON body) => treat as no match
					continue
				}
				if matches != nil && matches.(bool) {
					matchedIDs = append(matchedIDs, d.ID)
				}
			}
			assert.Equal(t, tt.ExpectedMatches, matchedIDs, "query %q", tt.Name)
		})
	}
}

@@ -11,6 +11,8 @@ import (
	"github.com/SigNoz/signoz/pkg/modules/apdex/implapdex"
	"github.com/SigNoz/signoz/pkg/modules/dashboard"
	"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
	"github.com/SigNoz/signoz/pkg/modules/fields"
	"github.com/SigNoz/signoz/pkg/modules/fields/implfields"
	"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
	"github.com/SigNoz/signoz/pkg/modules/metricsexplorer/implmetricsexplorer"
	"github.com/SigNoz/signoz/pkg/modules/quickfilter"
@@ -28,6 +30,7 @@ import (
	"github.com/SigNoz/signoz/pkg/modules/tracefunnel"
	"github.com/SigNoz/signoz/pkg/modules/tracefunnel/impltracefunnel"
	"github.com/SigNoz/signoz/pkg/querier"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

type Handlers struct {
@@ -44,9 +47,19 @@ type Handlers struct {
	FlaggerHandler flagger.Handler
	GatewayHandler gateway.Handler
	Role role.Handler
	Fields fields.Handler
}

func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing, global global.Global, flaggerService flagger.Flagger, gatewayService gateway.Gateway) Handlers {
func NewHandlers(
	modules Modules,
	providerSettings factory.ProviderSettings,
	querier querier.Querier,
	licensing licensing.Licensing,
	global global.Global,
	flaggerService flagger.Flagger,
	gatewayService gateway.Gateway,
	telemetryMetadataStore telemetrytypes.MetadataStore,
) Handlers {
	return Handlers{
		SavedView: implsavedview.NewHandler(modules.SavedView),
		Apdex: implapdex.NewHandler(modules.Apdex),
@@ -61,5 +74,6 @@ func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, que
		FlaggerHandler: flagger.NewHandler(flaggerService),
		GatewayHandler: gateway.NewHandler(gatewayService),
		Role: implrole.NewHandler(modules.RoleSetter, modules.RoleGetter),
		Fields: implfields.NewHandler(providerSettings, telemetryMetadataStore),
	}
}

@@ -46,7 +46,7 @@ func TestNewHandlers(t *testing.T) {
	grantModule := implrole.NewGranter(implrole.NewStore(sqlstore), nil)
	modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, roleSetter, roleGetter, grantModule)

	handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil)
	handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil, nil)

	reflectVal := reflect.ValueOf(handlers)
	for i := 0; i < reflectVal.NumField(); i++ {

@@ -15,6 +15,7 @@ import (
	"github.com/SigNoz/signoz/pkg/instrumentation"
	"github.com/SigNoz/signoz/pkg/modules/authdomain"
	"github.com/SigNoz/signoz/pkg/modules/dashboard"
	"github.com/SigNoz/signoz/pkg/modules/fields"
	"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
	"github.com/SigNoz/signoz/pkg/modules/organization"
	"github.com/SigNoz/signoz/pkg/modules/preference"
@@ -52,6 +53,7 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
		struct{ gateway.Handler }{},
		struct{ role.Getter }{},
		struct{ role.Handler }{},
		struct{ fields.Handler }{},
	).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
	if err != nil {
		return nil, err

@@ -249,6 +249,7 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
			handlers.GatewayHandler,
			modules.RoleGetter,
			handlers.Role,
			handlers.Fields,
		),
	)
}

@@ -396,7 +396,7 @@ func New(
	modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard, roleSetter, roleGetter, granter)

	// Initialize all handlers for the modules
	handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway)
	handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway, telemetryMetadataStore)

	// Initialize the API server
	apiserver, err := factory.NewProviderFromNamedMap(

@@ -36,7 +36,7 @@ type ChangePasswordRequest struct {
}

type PostableForgotPassword struct {
	OrgID valuer.UUID `json:"orgId"`
	Email valuer.Email `json:"email"`
	OrgID valuer.UUID `json:"orgId" required:"true"`
	Email valuer.Email `json:"email" required:"true"`
	FrontendBaseURL string `json:"frontendBaseURL"`
}

@@ -33,8 +33,8 @@ type LimitConfig struct {
}

type LimitValue struct {
	Size int64 `json:"size,omitempty"`
	Count int64 `json:"count,omitempty"`
	Size int64 `json:"size"`
	Count int64 `json:"count"`
}

type LimitMetric struct {

@@ -31,6 +31,13 @@ var (
	TreemapModeSamples = TreemapMode{valuer.NewString("samples")}
)

func (TreemapMode) Enum() []any {
	return []any{
		TreemapModeTimeSeries,
		TreemapModeSamples,
	}
}

// StatsRequest represents the payload accepted by the metrics stats endpoint.
type StatsRequest struct {
	Filter *qbtypes.Filter `json:"filter,omitempty"`
@@ -98,7 +105,7 @@ func (req *StatsRequest) UnmarshalJSON(data []byte) error {
type Stat struct {
	MetricName string `json:"metricName" required:"true"`
	Description string `json:"description" required:"true"`
	MetricType metrictypes.Type `json:"type" required:"true" enum:"gauge,sum,histogram,summary,exponentialhistogram"`
	MetricType metrictypes.Type `json:"type" required:"true"`
	MetricUnit string `json:"unit" required:"true"`
	TimeSeries uint64 `json:"timeseries" required:"true"`
	Samples uint64 `json:"samples" required:"true"`
@@ -112,9 +119,9 @@ type StatsResponse struct {

type MetricMetadata struct {
	Description string `json:"description" required:"true"`
	MetricType metrictypes.Type `json:"type" required:"true" enum:"gauge,sum,histogram,summary,exponentialhistogram"`
	MetricType metrictypes.Type `json:"type" required:"true"`
	MetricUnit string `json:"unit" required:"true"`
	Temporality metrictypes.Temporality `json:"temporality" required:"true" enum:"delta,cumulative,unspecified"`
	Temporality metrictypes.Temporality `json:"temporality" required:"true"`
	IsMonotonic bool `json:"isMonotonic" required:"true"`
}

@@ -131,10 +138,10 @@ func (m *MetricMetadata) UnmarshalBinary(data []byte) error {
// UpdateMetricMetadataRequest represents the payload for updating metric metadata.
type UpdateMetricMetadataRequest struct {
	MetricName string `json:"metricName" required:"true"`
	Type metrictypes.Type `json:"type" required:"true" enum:"gauge,sum,histogram,summary,exponentialhistogram"`
	Type metrictypes.Type `json:"type" required:"true"`
	Description string `json:"description" required:"true"`
	Unit string `json:"unit" required:"true"`
	Temporality metrictypes.Temporality `json:"temporality" required:"true" enum:"delta,cumulative,unspecified"`
	Temporality metrictypes.Temporality `json:"temporality" required:"true"`
	IsMonotonic bool `json:"isMonotonic" required:"true"`
}

@@ -144,7 +151,7 @@ type TreemapRequest struct {
	Start int64 `json:"start" required:"true"`
	End int64 `json:"end" required:"true"`
	Limit int `json:"limit" required:"true"`
	Mode TreemapMode `json:"mode" required:"true" enum:"timeseries,samples"`
	Mode TreemapMode `json:"mode" required:"true"`
}

// Validate enforces basic constraints on TreemapRequest.

@@ -36,7 +36,7 @@ func (t Temporality) Value() (driver.Value, error) {
	}
}

func (t *Temporality) Scan(src interface{}) error {
func (t *Temporality) Scan(src any) error {
	if src == nil {
		*t = Unknown
		return nil
@@ -66,6 +66,14 @@ func (t *Temporality) Scan(src interface{}) error {
	return nil
}

func (Temporality) Enum() []any {
	return []any{
		Delta,
		Cumulative,
		Unspecified,
	}
}

// Type is the type of the metric in OTLP data model
// Read more here https://opentelemetry.io/docs/specs/otel/metrics/data-model/#metric-points
type Type struct {
@@ -134,6 +142,16 @@ var (
	UnspecifiedType = Type{valuer.NewString("")}
)

func (Type) Enum() []any {
	return []any{
		GaugeType,
		SumType,
		HistogramType,
		SummaryType,
		ExpHistogramType,
	}
}

type TimeAggregation struct {
	valuer.String
}
@@ -151,6 +169,21 @@ var (
	TimeAggregationIncrease = TimeAggregation{valuer.NewString("increase")}
)

func (TimeAggregation) Enum() []any {
	return []any{
		TimeAggregationUnspecified,
		TimeAggregationLatest,
		TimeAggregationSum,
		TimeAggregationAvg,
		TimeAggregationMin,
		TimeAggregationMax,
		TimeAggregationCount,
		TimeAggregationCountDistinct,
		TimeAggregationRate,
		TimeAggregationIncrease,
	}
}

type SpaceAggregation struct {
	valuer.String
}
@@ -169,6 +202,22 @@ var (
	SpaceAggregationPercentile99 = SpaceAggregation{valuer.NewString("p99")}
)

func (SpaceAggregation) Enum() []any {
	return []any{
		SpaceAggregationUnspecified,
		SpaceAggregationSum,
		SpaceAggregationAvg,
		SpaceAggregationMin,
		SpaceAggregationMax,
		SpaceAggregationCount,
		SpaceAggregationPercentile50,
		SpaceAggregationPercentile75,
		SpaceAggregationPercentile90,
		SpaceAggregationPercentile95,
		SpaceAggregationPercentile99,
	}
}

func (s SpaceAggregation) IsPercentile() bool {
	return s == SpaceAggregationPercentile50 ||
		s == SpaceAggregationPercentile75 ||

@@ -266,3 +266,110 @@ type FieldValueSelector struct {
	Value string `json:"value"`
	Limit int `json:"limit"`
}

type GettableFieldKeys struct {
	Keys map[string][]*TelemetryFieldKey `json:"keys"`
	Complete bool `json:"complete"`
}

type PostableFieldKeysParams struct {
	Signal Signal `query:"signal"`
	Source Source `query:"source"`
	Limit int `query:"limit"`
	StartUnixMilli int64 `query:"startUnixMilli"`
	EndUnixMilli int64 `query:"endUnixMilli"`
	FieldContext FieldContext `query:"fieldContext"`
	FieldDataType FieldDataType `query:"fieldDataType"`
	MetricName string `query:"metricName"`
	SearchText string `query:"searchText"`
}

type GettableFieldValues struct {
	Values *TelemetryFieldValues `json:"values"`
	Complete bool `json:"complete"`
}

type PostableFieldValueParams struct {
	PostableFieldKeysParams
	Name string `query:"name"`
	ExistingQuery string `query:"existingQuery"`
}

func NewFieldKeySelectorFromPostableFieldKeysParams(params PostableFieldKeysParams) *FieldKeySelector {
	var req FieldKeySelector

	if params.StartUnixMilli != 0 {
		req.StartUnixMilli = params.StartUnixMilli
		// Round down to the nearest 6 hours (21600000 milliseconds)
		req.StartUnixMilli -= req.StartUnixMilli % 21600000
	}

	if params.EndUnixMilli != 0 {
		req.EndUnixMilli = params.EndUnixMilli
	}

	req.Signal = params.Signal
	req.Source = params.Source
	req.FieldContext = params.FieldContext
	req.FieldDataType = params.FieldDataType
	req.SelectorMatchType = FieldSelectorMatchTypeFuzzy

	if params.Limit != 0 {
		req.Limit = params.Limit
	} else {
		req.Limit = 1000
	}

	if params.MetricName != "" {
		req.MetricContext = &MetricContext{
			MetricName: params.MetricName,
		}
	}

	req.Name = params.SearchText
	if params.SearchText != "" && params.FieldContext == FieldContextUnspecified {
		parsedFieldKey := GetFieldKeyFromKeyText(params.SearchText)
		if parsedFieldKey.FieldContext != FieldContextUnspecified {
			// Only apply inferred context if it is valid for the current signal
			if isContextValidForSignal(parsedFieldKey.FieldContext, req.Signal) {
				req.Name = parsedFieldKey.Name
				req.FieldContext = parsedFieldKey.FieldContext
			}
		}
	}

	return &req
}

func NewFieldValueSelectorFromPostableFieldValueParams(params PostableFieldValueParams) *FieldValueSelector {

	keySelector := NewFieldKeySelectorFromPostableFieldKeysParams(params.PostableFieldKeysParams)

	fieldValueSelector := &FieldValueSelector{
		FieldKeySelector: keySelector,
	}

	fieldValueSelector.Name = params.Name

	if params.Name != "" && fieldValueSelector.FieldContext == FieldContextUnspecified {
		parsedFieldKey := GetFieldKeyFromKeyText(params.Name)
		if parsedFieldKey.FieldContext != FieldContextUnspecified {
			// Only apply inferred context if it is valid for the current signal
			if isContextValidForSignal(parsedFieldKey.FieldContext, fieldValueSelector.Signal) {
				fieldValueSelector.Name = parsedFieldKey.Name
				fieldValueSelector.FieldContext = parsedFieldKey.FieldContext
			}
		}
	}

	fieldValueSelector.ExistingQuery = params.ExistingQuery
	fieldValueSelector.Value = params.SearchText

	if params.Limit != 0 {
		fieldValueSelector.Limit = params.Limit
	} else {
		fieldValueSelector.Limit = 50
	}

	return fieldValueSelector
}

@@ -154,3 +154,21 @@ func (f FieldContext) TagType() string {
	}
	return ""
}

func isContextValidForSignal(ctx FieldContext, signal Signal) bool {
	if ctx == FieldContextResource ||
		ctx == FieldContextAttribute ||
		ctx == FieldContextScope {
		return true
	}

	switch signal.StringValue() {
	case SignalLogs.StringValue():
		return ctx == FieldContextLog || ctx == FieldContextBody
	case SignalTraces.StringValue():
		return ctx == FieldContextSpan || ctx == FieldContextEvent || ctx == FieldContextTrace
	case SignalMetrics.StringValue():
		return ctx == FieldContextMetric
	}
	return true
}

@@ -77,3 +77,10 @@ func (enum *String) UnmarshalText(text []byte) error {
func (enum String) MarshalText() (text []byte, err error) {
	return []byte(enum.StringValue()), nil
}

// Implement Gin's BindUnmarshaler interface
// See https://github.com/SigNoz/signoz/pull/10219 description for additional details
func (enum *String) UnmarshalParam(param string) error {
	*enum = NewString(param)
	return nil
}