Compare commits

...

13 Commits

Author SHA1 Message Date
srikanthccv
da2ad3b0c6 chore: resolve conflicts 2026-02-09 13:49:18 +05:30
Srikanth Chekuri
24b588bfba chore: move fields api to openapi spec (#10219) 2026-02-09 13:43:36 +05:30
Abhi kumar
e5867cc2ad chore: updated chart theme colors (#10233)
* chore: updated chart theme colors

* fix: fixed failing tests
2026-02-09 12:25:36 +05:30
Srikanth Chekuri
1251fb7e1d Merge branch 'main' into query-range-v5-openapi-spec 2026-02-09 07:05:46 +05:30
Srikanth Chekuri
b420ca494e chore: remove inline enum and implement jsonschema.Enum for metric types (#10238)
2026-02-09 07:02:32 +05:30
srikanthccv
25e81bef02 chore: add OpenAPI spec for /v5/query_range 2026-02-09 06:26:10 +05:30
Piyush Singariya
e4693ce64c fix: improve qbtoexpr test suite (#10217)
* fix: improve qbtoexpr test suite

* fix: assert eq order

---------

Co-authored-by: Nityananda Gohain <nityanandagohain@gmail.com>
2026-02-06 15:15:56 +00:00
Ashwin Bhatkal
ca9b3a910a chore: separate out query and custom variable (#10221)
* chore: separate out query and custom variable

* chore: resolve own comments
2026-02-06 19:06:30 +05:30
Karan Balani
9dc7d2389a fix: minor changes for gateway and forgot password apis (#10204)
* fix: make size and count included in json if zero

* fix: make forgot password api fields required

* fix: openapi spec

* fix: error message casing for frontend

* chore: fix openapi spec

* fix: openapi specs
2026-02-06 18:00:33 +05:30
Abhi kumar
da5860297f feat: added new time series panel (#10207)
* feat: added new time series panel

* fix: pr review comments

* fix: pr review comments

* chore: memoized data creation in chartmanager
2026-02-06 16:35:30 +05:30
Ashwin Bhatkal
54fca5ba44 chore: separate out textbox variable into its own component (#10206)
* chore: initialise fetch store

* chore: small refactor

* chore: separate out textbox component into its own component
2026-02-06 16:19:49 +05:30
Ashwin Bhatkal
66f4c3d6ec chore: add max-params eslint rule and update contribution guidelines (#10200)
* chore: add max-params eslint rule and update contribution guidelines

* chore: add no-cycle rule
2026-02-06 13:03:59 +05:30
Srikanth Chekuri
a0f407a848 chore(metricsexplorer): update tags and regenerate (#10197) 2026-02-06 12:10:11 +05:30
60 changed files with 5021 additions and 1424 deletions

File diff suppressed because it is too large

View File

@@ -9,8 +9,8 @@ import (
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/http/middleware"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
@@ -22,6 +22,8 @@ import (
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/version"
"github.com/gorilla/mux"
)
@@ -56,7 +58,6 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
FluxInterval: opts.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
@@ -108,7 +109,22 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)
// v5
router.HandleFunc("/api/v5/query_range", am.ViewAccess(ah.queryRangeV5)).Methods(http.MethodPost)
router.Handle("/api/v5/query_range", handler.New(am.ViewAccess(ah.queryRangeV5), handler.OpenAPIDef{
ID: "QueryRangeV5",
Tags: []string{"query"},
Summary: "Query range",
Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, joins, trace operators, PromQL, and ClickHouse SQL.",
Request: new(qbtypes.QueryRangeRequest),
RequestContentType: "application/json",
Response: new(qbtypes.QueryRangeResponse),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest},
SecuritySchemes: []handler.OpenAPISecurityScheme{
{Name: ctxtypes.AuthTypeAPIKey.StringValue(), Scopes: []string{"VIEWER"}},
{Name: ctxtypes.AuthTypeTokenizer.StringValue(), Scopes: []string{"VIEWER"}},
},
})).Methods(http.MethodPost)
router.HandleFunc("/api/v5/substitute_vars", am.ViewAccess(ah.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)

View File

@@ -237,7 +237,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
apiHandler.RegisterLogsRoutes(r, am)
apiHandler.RegisterIntegrationRoutes(r, am)
apiHandler.RegisterCloudIntegrationsRoutes(r, am)
apiHandler.RegisterFieldsRoutes(r, am)
apiHandler.RegisterQueryRangeV3Routes(r, am)
apiHandler.RegisterInfraMetricsRoutes(r, am)
apiHandler.RegisterQueryRangeV4Routes(r, am)

View File

@@ -61,6 +61,8 @@ module.exports = {
curly: 'error', // Requires curly braces for all control statements
eqeqeq: ['error', 'always', { null: 'ignore' }], // Enforces === and !== (allows == null for null/undefined check)
'no-console': ['error', { allow: ['warn', 'error'] }], // Warns on console.log, allows console.warn/error
// TODO: Change this to error in May 2026
'max-params': ['warn', 3], // a function can have max 3 params after which it should become an object
// TypeScript rules
'@typescript-eslint/explicit-function-return-type': 'error', // Requires explicit return types on functions
@@ -116,7 +118,7 @@ module.exports = {
},
],
'import/no-extraneous-dependencies': ['error', { devDependencies: true }], // Prevents importing packages not in package.json
// 'import/no-cycle': 'warn', // TODO: Enable later to detect circular dependencies
'import/no-cycle': 'warn', // Warns about circular dependencies
// Import sorting rules
'simple-import-sort/imports': [
@@ -146,6 +148,19 @@ module.exports = {
'sonarjs/no-duplicate-string': 'off', // Disabled - can be noisy (enable periodically to check)
},
overrides: [
{
files: [
'**/*.test.{js,jsx,ts,tsx}',
'**/*.spec.{js,jsx,ts,tsx}',
'**/__tests__/**/*.{js,jsx,ts,tsx}',
],
rules: {
// Tests often have intentional duplication and complexity - disable SonarJS rules
'sonarjs/cognitive-complexity': 'off', // Tests can be complex
'sonarjs/no-identical-functions': 'off', // Similar test patterns are OK
'sonarjs/no-small-switch': 'off', // Small switches are OK in tests
},
},
{
files: ['src/api/generated/**/*.ts'],
rules: {
@@ -153,7 +168,6 @@ module.exports = {
'@typescript-eslint/explicit-module-boundary-types': 'off',
'no-nested-ternary': 'off',
'@typescript-eslint/no-unused-vars': 'warn',
'sonarjs/no-duplicate-string': 'off',
},
},
],
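
The new `max-params: ['warn', 3]` rule warns once a function takes more than three positional parameters and nudges callers toward a single options object. A small sketch of that refactor, with hypothetical names:

```typescript
// Before: four positional parameters trip the max-params warning.
function formatSeriesLabel(
	name: string,
	value: number,
	unit: string,
	showUnit: boolean,
): string {
	return showUnit ? `${name}: ${value} ${unit}` : `${name}: ${value}`;
}

// After: a single options object keeps the signature within the limit and
// makes call sites self-describing.
interface FormatSeriesLabelOptions {
	name: string;
	value: number;
	unit: string;
	showUnit: boolean;
}

function formatSeriesLabelFromOptions({
	name,
	value,
	unit,
	showUnit,
}: FormatSeriesLabelOptions): string {
	return showUnit ? `${name}: ${value} ${unit}` : `${name}: ${value}`;
}
```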

View File

@@ -2,6 +2,11 @@
Embrace the spirit of collaboration and contribute to the success of our open-source project by adhering to these frontend development guidelines with precision and passion.
### Export Style
- **React components** (`src/components/`, `src/container/`, `src/pages/`): Prefer **default exports** for the main component in each file
- **Utilities, hooks, APIs, types, constants** (`src/utils/`, `src/hooks/`, `src/api/`, `src/lib/`, `src/types/`, `src/constants/`): Prefer **named exports** for better tree-shaking and explicit imports
### React and Components
- Strive to create small and modular components, ensuring they are divided into individual pieces for improved maintainability and reusability.
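
To make the Export Style guideline concrete, a sketch with made-up file and identifier names: a component file uses a default export, while a utility module uses named exports.

```typescript
// src/components/PanelTitle/PanelTitle.tsx (hypothetical component file):
// the main component in the file is the default export.
export default function PanelTitle({ title }: { title: string }): JSX.Element {
	return <h3 className="panel-title">{title}</h3>;
}

// src/utils/duration.ts (hypothetical utility module): named exports keep
// imports explicit and tree-shakeable.
export const MS_PER_SECOND = 1000;

export function toSeconds(durationMs: number): number {
	return durationMs / MS_PER_SECOND;
}
```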

View File

@@ -28,8 +28,10 @@ import type {
GatewaytypesPostableIngestionKeyLimitDTO,
GatewaytypesUpdatableIngestionKeyLimitDTO,
GetIngestionKeys200,
GetIngestionKeysParams,
RenderErrorResponseDTO,
SearchIngestionKeys200,
SearchIngestionKeysParams,
UpdateIngestionKeyLimitPathParameters,
UpdateIngestionKeyPathParameters,
} from '../sigNoz.schemas';
@@ -42,35 +44,44 @@ type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
* This endpoint returns the ingestion keys for a workspace
* @summary Get ingestion keys for workspace
*/
export const getIngestionKeys = (signal?: AbortSignal) => {
export const getIngestionKeys = (
params?: GetIngestionKeysParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetIngestionKeys200>({
url: `/api/v2/gateway/ingestion_keys`,
method: 'GET',
params,
signal,
});
};
export const getGetIngestionKeysQueryKey = () => {
return ['getIngestionKeys'] as const;
export const getGetIngestionKeysQueryKey = (
params?: GetIngestionKeysParams,
) => {
return ['getIngestionKeys', ...(params ? [params] : [])] as const;
};
export const getGetIngestionKeysQueryOptions = <
TData = Awaited<ReturnType<typeof getIngestionKeys>>,
TError = RenderErrorResponseDTO
>(options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getIngestionKeys>>,
TError,
TData
>;
}) => {
>(
params?: GetIngestionKeysParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getIngestionKeys>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getGetIngestionKeysQueryKey();
const queryKey = queryOptions?.queryKey ?? getGetIngestionKeysQueryKey(params);
const queryFn: QueryFunction<Awaited<ReturnType<typeof getIngestionKeys>>> = ({
signal,
}) => getIngestionKeys(signal);
}) => getIngestionKeys(params, signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof getIngestionKeys>>,
@@ -91,14 +102,17 @@ export type GetIngestionKeysQueryError = RenderErrorResponseDTO;
export function useGetIngestionKeys<
TData = Awaited<ReturnType<typeof getIngestionKeys>>,
TError = RenderErrorResponseDTO
>(options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getIngestionKeys>>,
TError,
TData
>;
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetIngestionKeysQueryOptions(options);
>(
params?: GetIngestionKeysParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getIngestionKeys>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetIngestionKeysQueryOptions(params, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
@@ -114,10 +128,11 @@ export function useGetIngestionKeys<
*/
export const invalidateGetIngestionKeys = async (
queryClient: QueryClient,
params?: GetIngestionKeysParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetIngestionKeysQueryKey() },
{ queryKey: getGetIngestionKeysQueryKey(params) },
options,
);
@@ -662,35 +677,45 @@ export const useUpdateIngestionKeyLimit = <
* This endpoint returns the ingestion keys for a workspace
* @summary Search ingestion keys for workspace
*/
export const searchIngestionKeys = (signal?: AbortSignal) => {
export const searchIngestionKeys = (
params?: SearchIngestionKeysParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<SearchIngestionKeys200>({
url: `/api/v2/gateway/ingestion_keys/search`,
method: 'GET',
params,
signal,
});
};
export const getSearchIngestionKeysQueryKey = () => {
return ['searchIngestionKeys'] as const;
export const getSearchIngestionKeysQueryKey = (
params?: SearchIngestionKeysParams,
) => {
return ['searchIngestionKeys', ...(params ? [params] : [])] as const;
};
export const getSearchIngestionKeysQueryOptions = <
TData = Awaited<ReturnType<typeof searchIngestionKeys>>,
TError = RenderErrorResponseDTO
>(options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof searchIngestionKeys>>,
TError,
TData
>;
}) => {
>(
params?: SearchIngestionKeysParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof searchIngestionKeys>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getSearchIngestionKeysQueryKey();
const queryKey =
queryOptions?.queryKey ?? getSearchIngestionKeysQueryKey(params);
const queryFn: QueryFunction<
Awaited<ReturnType<typeof searchIngestionKeys>>
> = ({ signal }) => searchIngestionKeys(signal);
> = ({ signal }) => searchIngestionKeys(params, signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof searchIngestionKeys>>,
@@ -711,14 +736,17 @@ export type SearchIngestionKeysQueryError = RenderErrorResponseDTO;
export function useSearchIngestionKeys<
TData = Awaited<ReturnType<typeof searchIngestionKeys>>,
TError = RenderErrorResponseDTO
>(options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof searchIngestionKeys>>,
TError,
TData
>;
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getSearchIngestionKeysQueryOptions(options);
>(
params?: SearchIngestionKeysParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof searchIngestionKeys>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getSearchIngestionKeysQueryOptions(params, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
@@ -734,10 +762,11 @@ export function useSearchIngestionKeys<
*/
export const invalidateSearchIngestionKeys = async (
queryClient: QueryClient,
params?: SearchIngestionKeysParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getSearchIngestionKeysQueryKey() },
{ queryKey: getSearchIngestionKeysQueryKey(params) },
options,
);
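
With this regeneration, `getIngestionKeys`/`searchIngestionKeys` and their React Query wrappers accept pagination params and fold them into the query key. A usage sketch follows; the `page`/`per_page` fields come from `GetIngestionKeysParams` in this diff, while the import path and component are illustrative.

```typescript
// Hypothetical consumer of the regenerated hook. The import path is a
// placeholder; the real hook lives with the other generated gateway modules.
import { useGetIngestionKeys } from 'api/generated/gateway';

function IngestionKeysList(): JSX.Element {
	// params are now part of the query key, so each page is cached separately
	// and changing the page triggers a refetch.
	const { data, isLoading, error } = useGetIngestionKeys({
		page: 1,
		per_page: 20,
	});

	if (isLoading) return <span>Loading ingestion keys...</span>;
	if (error) return <span>Failed to load ingestion keys</span>;
	return <pre>{JSON.stringify(data?.data, null, 2)}</pre>;
}
```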

View File

@@ -47,7 +47,7 @@ type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
* @summary Get metric alerts
*/
export const getMetricAlerts = (
params?: GetMetricAlertsParams,
params: GetMetricAlertsParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricAlerts200>({
@@ -66,7 +66,7 @@ export const getGetMetricAlertsQueryOptions = <
TData = Awaited<ReturnType<typeof getMetricAlerts>>,
TError = RenderErrorResponseDTO
>(
params?: GetMetricAlertsParams,
params: GetMetricAlertsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricAlerts>>,
@@ -103,7 +103,7 @@ export function useGetMetricAlerts<
TData = Awaited<ReturnType<typeof getMetricAlerts>>,
TError = RenderErrorResponseDTO
>(
params?: GetMetricAlertsParams,
params: GetMetricAlertsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricAlerts>>,
@@ -128,7 +128,7 @@ export function useGetMetricAlerts<
*/
export const invalidateGetMetricAlerts = async (
queryClient: QueryClient,
params?: GetMetricAlertsParams,
params: GetMetricAlertsParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
@@ -144,7 +144,7 @@ export const invalidateGetMetricAlerts = async (
* @summary Get metric dashboards
*/
export const getMetricDashboards = (
params?: GetMetricDashboardsParams,
params: GetMetricDashboardsParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricDashboards200>({
@@ -165,7 +165,7 @@ export const getGetMetricDashboardsQueryOptions = <
TData = Awaited<ReturnType<typeof getMetricDashboards>>,
TError = RenderErrorResponseDTO
>(
params?: GetMetricDashboardsParams,
params: GetMetricDashboardsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricDashboards>>,
@@ -203,7 +203,7 @@ export function useGetMetricDashboards<
TData = Awaited<ReturnType<typeof getMetricDashboards>>,
TError = RenderErrorResponseDTO
>(
params?: GetMetricDashboardsParams,
params: GetMetricDashboardsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricDashboards>>,
@@ -228,7 +228,7 @@ export function useGetMetricDashboards<
*/
export const invalidateGetMetricDashboards = async (
queryClient: QueryClient,
params?: GetMetricDashboardsParams,
params: GetMetricDashboardsParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
@@ -244,7 +244,7 @@ export const invalidateGetMetricDashboards = async (
* @summary Get metric highlights
*/
export const getMetricHighlights = (
params?: GetMetricHighlightsParams,
params: GetMetricHighlightsParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricHighlights200>({
@@ -265,7 +265,7 @@ export const getGetMetricHighlightsQueryOptions = <
TData = Awaited<ReturnType<typeof getMetricHighlights>>,
TError = RenderErrorResponseDTO
>(
params?: GetMetricHighlightsParams,
params: GetMetricHighlightsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricHighlights>>,
@@ -303,7 +303,7 @@ export function useGetMetricHighlights<
TData = Awaited<ReturnType<typeof getMetricHighlights>>,
TError = RenderErrorResponseDTO
>(
params?: GetMetricHighlightsParams,
params: GetMetricHighlightsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricHighlights>>,
@@ -328,7 +328,7 @@ export function useGetMetricHighlights<
*/
export const invalidateGetMetricHighlights = async (
queryClient: QueryClient,
params?: GetMetricHighlightsParams,
params: GetMetricHighlightsParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
@@ -526,7 +526,7 @@ export const useGetMetricAttributes = <
* @summary Get metric metadata
*/
export const getMetricMetadata = (
params?: GetMetricMetadataParams,
params: GetMetricMetadataParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricMetadata200>({
@@ -547,7 +547,7 @@ export const getGetMetricMetadataQueryOptions = <
TData = Awaited<ReturnType<typeof getMetricMetadata>>,
TError = RenderErrorResponseDTO
>(
params?: GetMetricMetadataParams,
params: GetMetricMetadataParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricMetadata>>,
@@ -585,7 +585,7 @@ export function useGetMetricMetadata<
TData = Awaited<ReturnType<typeof getMetricMetadata>>,
TError = RenderErrorResponseDTO
>(
params?: GetMetricMetadataParams,
params: GetMetricMetadataParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricMetadata>>,
@@ -610,7 +610,7 @@ export function useGetMetricMetadata<
*/
export const invalidateGetMetricMetadata = async (
queryClient: QueryClient,
params?: GetMetricMetadataParams,
params: GetMetricMetadataParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
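
The metrics-explorer hooks change in the opposite direction: `params` becomes required, so the metric name must be supplied at every call site. A brief sketch; the hook and param names come from this diff, the rest is illustrative.

```typescript
// Hypothetical consumer; the import path is a placeholder.
import { useGetMetricMetadata } from 'api/generated/metricsExplorer';

function MetricMetadataPanel({
	metricName,
}: {
	metricName: string;
}): JSX.Element {
	// Omitting params is now a compile-time error instead of a silent empty query.
	const { data, isLoading } = useGetMetricMetadata({ metricName });

	if (isLoading) return <span>Loading metadata...</span>;
	return <pre>{JSON.stringify(data, null, 2)}</pre>;
}
```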

View File

@@ -650,11 +650,11 @@ export interface MetricsexplorertypesMetricAlertDTO {
/**
* @type string
*/
alertId?: string;
alertId: string;
/**
* @type string
*/
alertName?: string;
alertName: string;
}
export interface MetricsexplorertypesMetricAlertsResponseDTO {
@@ -662,24 +662,24 @@ export interface MetricsexplorertypesMetricAlertsResponseDTO {
* @type array
* @nullable true
*/
alerts?: MetricsexplorertypesMetricAlertDTO[] | null;
alerts: MetricsexplorertypesMetricAlertDTO[] | null;
}
export interface MetricsexplorertypesMetricAttributeDTO {
/**
* @type string
*/
key?: string;
key: string;
/**
* @type integer
* @minimum 0
*/
valueCount?: number;
valueCount: number;
/**
* @type array
* @nullable true
*/
values?: string[] | null;
values: string[] | null;
}
export interface MetricsexplorertypesMetricAttributesRequestDTO {
@@ -691,7 +691,7 @@ export interface MetricsexplorertypesMetricAttributesRequestDTO {
/**
* @type string
*/
metricName?: string;
metricName: string;
/**
* @type integer
* @nullable true
@@ -704,31 +704,31 @@ export interface MetricsexplorertypesMetricAttributesResponseDTO {
* @type array
* @nullable true
*/
attributes?: MetricsexplorertypesMetricAttributeDTO[] | null;
attributes: MetricsexplorertypesMetricAttributeDTO[] | null;
/**
* @type integer
* @format int64
*/
totalKeys?: number;
totalKeys: number;
}
export interface MetricsexplorertypesMetricDashboardDTO {
/**
* @type string
*/
dashboardId?: string;
dashboardId: string;
/**
* @type string
*/
dashboardName?: string;
dashboardName: string;
/**
* @type string
*/
widgetId?: string;
widgetId: string;
/**
* @type string
*/
widgetName?: string;
widgetName: string;
}
export interface MetricsexplorertypesMetricDashboardsResponseDTO {
@@ -736,7 +736,7 @@ export interface MetricsexplorertypesMetricDashboardsResponseDTO {
* @type array
* @nullable true
*/
dashboards?: MetricsexplorertypesMetricDashboardDTO[] | null;
dashboards: MetricsexplorertypesMetricDashboardDTO[] | null;
}
export interface MetricsexplorertypesMetricHighlightsResponseDTO {
@@ -744,74 +744,65 @@ export interface MetricsexplorertypesMetricHighlightsResponseDTO {
* @type integer
* @minimum 0
*/
activeTimeSeries?: number;
activeTimeSeries: number;
/**
* @type integer
* @minimum 0
*/
dataPoints?: number;
dataPoints: number;
/**
* @type integer
* @minimum 0
*/
lastReceived?: number;
lastReceived: number;
/**
* @type integer
* @minimum 0
*/
totalTimeSeries?: number;
totalTimeSeries: number;
}
export interface MetricsexplorertypesMetricMetadataDTO {
/**
* @type string
*/
description?: string;
description: string;
/**
* @type boolean
*/
isMonotonic?: boolean;
isMonotonic: boolean;
temporality: MetrictypesTemporalityDTO;
type: MetrictypesTypeDTO;
/**
* @type string
*/
temporality?: string;
/**
* @type string
*/
type?: string;
/**
* @type string
*/
unit?: string;
unit: string;
}
export interface MetricsexplorertypesStatDTO {
/**
* @type string
*/
description?: string;
description: string;
/**
* @type string
*/
metricName?: string;
metricName: string;
/**
* @type integer
* @minimum 0
*/
samples?: number;
samples: number;
/**
* @type integer
* @minimum 0
*/
timeseries?: number;
timeseries: number;
type: MetrictypesTypeDTO;
/**
* @type string
*/
type?: string;
/**
* @type string
*/
unit?: string;
unit: string;
}
export interface MetricsexplorertypesStatsRequestDTO {
@@ -819,12 +810,12 @@ export interface MetricsexplorertypesStatsRequestDTO {
* @type integer
* @format int64
*/
end?: number;
end: number;
filter?: Querybuildertypesv5FilterDTO;
/**
* @type integer
*/
limit?: number;
limit: number;
/**
* @type integer
*/
@@ -834,7 +825,7 @@ export interface MetricsexplorertypesStatsRequestDTO {
* @type integer
* @format int64
*/
start?: number;
start: number;
}
export interface MetricsexplorertypesStatsResponseDTO {
@@ -842,51 +833,52 @@ export interface MetricsexplorertypesStatsResponseDTO {
* @type array
* @nullable true
*/
metrics?: MetricsexplorertypesStatDTO[] | null;
metrics: MetricsexplorertypesStatDTO[] | null;
/**
* @type integer
* @minimum 0
*/
total?: number;
total: number;
}
export interface MetricsexplorertypesTreemapEntryDTO {
/**
* @type string
*/
metricName?: string;
metricName: string;
/**
* @type number
* @format double
*/
percentage?: number;
percentage: number;
/**
* @type integer
* @minimum 0
*/
totalValue?: number;
totalValue: number;
}
export enum MetricsexplorertypesTreemapModeDTO {
timeseries = 'timeseries',
samples = 'samples',
}
export interface MetricsexplorertypesTreemapRequestDTO {
/**
* @type integer
* @format int64
*/
end?: number;
end: number;
filter?: Querybuildertypesv5FilterDTO;
/**
* @type integer
*/
limit?: number;
/**
* @type string
*/
mode?: string;
limit: number;
mode: MetricsexplorertypesTreemapModeDTO;
/**
* @type integer
* @format int64
*/
start?: number;
start: number;
}
export interface MetricsexplorertypesTreemapResponseDTO {
@@ -894,41 +886,47 @@ export interface MetricsexplorertypesTreemapResponseDTO {
* @type array
* @nullable true
*/
samples?: MetricsexplorertypesTreemapEntryDTO[] | null;
samples: MetricsexplorertypesTreemapEntryDTO[] | null;
/**
* @type array
* @nullable true
*/
timeseries?: MetricsexplorertypesTreemapEntryDTO[] | null;
timeseries: MetricsexplorertypesTreemapEntryDTO[] | null;
}
export interface MetricsexplorertypesUpdateMetricMetadataRequestDTO {
/**
* @type string
*/
description?: string;
description: string;
/**
* @type boolean
*/
isMonotonic?: boolean;
isMonotonic: boolean;
/**
* @type string
*/
metricName?: string;
metricName: string;
temporality: MetrictypesTemporalityDTO;
type: MetrictypesTypeDTO;
/**
* @type string
*/
temporality?: string;
/**
* @type string
*/
type?: string;
/**
* @type string
*/
unit?: string;
unit: string;
}
export enum MetrictypesTemporalityDTO {
delta = 'delta',
cumulative = 'cumulative',
unspecified = 'unspecified',
}
export enum MetrictypesTypeDTO {
gauge = 'gauge',
sum = 'sum',
histogram = 'histogram',
summary = 'summary',
exponentialhistogram = 'exponentialhistogram',
}
export interface PreferencetypesPreferenceDTO {
/**
* @type array
@@ -1347,7 +1345,7 @@ export interface TypesPostableForgotPasswordDTO {
/**
* @type string
*/
email?: string;
email: string;
/**
* @type string
*/
@@ -1355,7 +1353,7 @@ export interface TypesPostableForgotPasswordDTO {
/**
* @type string
*/
orgId?: string;
orgId: string;
}
export interface TypesPostableInviteDTO {
@@ -1851,6 +1849,19 @@ export type GetFeatures200 = {
status?: string;
};
export type GetIngestionKeysParams = {
/**
* @type integer
* @description undefined
*/
page?: number;
/**
* @type integer
* @description undefined
*/
per_page?: number;
};
export type GetIngestionKeys200 = {
data?: GatewaytypesGettableIngestionKeysDTO;
/**
@@ -1890,6 +1901,24 @@ export type DeleteIngestionKeyLimitPathParameters = {
export type UpdateIngestionKeyLimitPathParameters = {
limitId: string;
};
export type SearchIngestionKeysParams = {
/**
* @type string
* @description undefined
*/
name?: string;
/**
* @type integer
* @description undefined
*/
page?: number;
/**
* @type integer
* @description undefined
*/
per_page?: number;
};
export type SearchIngestionKeys200 = {
data?: GatewaytypesGettableIngestionKeysDTO;
/**
@@ -1903,7 +1932,7 @@ export type GetMetricAlertsParams = {
* @type string
* @description undefined
*/
metricName?: string;
metricName: string;
};
export type GetMetricAlerts200 = {
@@ -1919,7 +1948,7 @@ export type GetMetricDashboardsParams = {
* @type string
* @description undefined
*/
metricName?: string;
metricName: string;
};
export type GetMetricDashboards200 = {
@@ -1935,7 +1964,7 @@ export type GetMetricHighlightsParams = {
* @type string
* @description undefined
*/
metricName?: string;
metricName: string;
};
export type GetMetricHighlights200 = {
@@ -1962,7 +1991,7 @@ export type GetMetricMetadataParams = {
* @type string
* @description undefined
*/
metricName?: string;
metricName: string;
};
export type GetMetricMetadata200 = {
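
With the inline string fields replaced by the `MetrictypesTemporalityDTO` and `MetrictypesTypeDTO` enums, payloads can be built against enum members rather than free-form strings. A sketch; the field names and enum members come from this diff, while the concrete values and import path are illustrative.

```typescript
// Hypothetical payload construction; the import path is a placeholder for the
// generated schemas module.
import {
	MetricsexplorertypesUpdateMetricMetadataRequestDTO,
	MetrictypesTemporalityDTO,
	MetrictypesTypeDTO,
} from 'api/generated/sigNoz.schemas';

const updateRequest: MetricsexplorertypesUpdateMetricMetadataRequestDTO = {
	metricName: 'http_server_duration', // illustrative metric name
	description: 'Server-side request duration',
	isMonotonic: false,
	temporality: MetrictypesTemporalityDTO.delta,
	type: MetrictypesTypeDTO.histogram,
	unit: 'ms',
};
```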

View File

@@ -34,159 +34,230 @@ const themeColors = {
cyan: '#00FFFF',
},
chartcolors: {
radicalRed: '#FF1A66',
// Blues (3)
dodgerBlue: '#2F80ED',
mediumOrchid: '#BB6BD9',
seaBuckthorn: '#F2994A',
seaGreen: '#219653',
turquoiseBlue: '#56CCF2',
festivalOrange: '#F2C94C',
silver: '#BDBDBD',
outrageousOrange: '#FF6633',
roseBud: '#FFB399',
canary: '#FFFF99',
deepSkyBlue: '#00B3E6',
goldTips: '#E6B333',
royalBlue: '#3366E6',
avocado: '#999966',
mintGreen: '#99FF99',
chestnut: '#B34D4D',
lima: '#80B300',
olive: '#809900',
beautyBush: '#E6B3B3',
danube: '#6680B3',
oliveDrab: '#66991A',
lavenderRose: '#FF99E6',
electricLime: '#CCFF1A',
robin: '#3F5ECC',
harleyOrange: '#E6331A',
turquoise: '#33FFCC',
gladeGreen: '#66994D',
hemlock: '#66664D',
vidaLoca: '#4D8000',
rust: '#B33300',
red: '#FF0000', // Adding more colors, we need to get better colors from design team
blue: '#0000FF',
green: '#00FF00',
yellow: '#FFFF00',
purple: '#800080',
cyan: '#00FFFF',
magenta: '#FF00FF',
orange: '#FFA500',
pink: '#FFC0CB',
brown: '#A52A2A',
teal: '#008080',
lime: '#00FF00',
maroon: '#800000',
navy: '#000080',
aquamarine: '#7FFFD4',
darkSeaGreen: '#8FBC8F',
gray: '#808080',
skyBlue: '#87CEEB',
indigo: '#4B0082',
slateGray: '#708090',
chocolate: '#D2691E',
tomato: '#FF6347',
steelBlue: '#4682B4',
peru: '#CD853F',
darkOliveGreen: '#556B2F',
indianRed: '#CD5C5C',
mediumSlateBlue: '#7B68EE',
rosyBrown: '#BC8F8F',
darkSlateGray: '#2F4F4F',
mediumAquamarine: '#66CDAA',
lavender: '#E6E6FA',
thistle: '#D8BFD8',
salmon: '#FA8072',
darkSalmon: '#E9967A',
paleVioletRed: '#DB7093',
mediumPurple: '#9370DB',
darkOrchid: '#9932CC',
lawnGreen: '#7CFC00',
// Teals / Cyans (3)
turquoise: '#00CEC9',
lagoon: '#1ABC9C',
cyanBright: '#22A6F2',
// Greens (3)
emeraldGreen: '#27AE60',
mediumSeaGreen: '#3CB371',
lightCoral: '#F08080',
gold: '#FFD700',
sandyBrown: '#F4A460',
darkKhaki: '#BDB76B',
cornflowerBlue: '#6495ED',
mediumVioletRed: '#C71585',
paleGreen: '#98FB98',
limeGreen: '#A3E635',
// Yellows / Golds (3)
festivalYellow: '#F2C94C',
sunflower: '#FFD93D',
warmAmber: '#FFCA28',
// Oranges (3)
festivalOrange: '#F2994A',
coralOrange: '#E17055',
pumpkin: '#FF7F50',
// Reds (3)
radicalRed: '#FF1A66',
crimsonRed: '#EB5757',
fireRed: '#E10600',
// Pinks (3)
hotPink: '#E84393',
rosePink: '#FD79A8',
blush: '#FF7EB6',
// Purples / Violets (3)
mediumPurple: '#BB6BD9',
royalPurple: '#9B51E0',
orchid: '#DA77F2',
// Accent / Neon / Unique Colors (3)
neonViolet: '#C77DFF',
electricPurple: '#6C5CE7',
arcticBlue: '#48DBFB',
// Extended palette — systematic variations to reach 100 colors
blue1: '#1F63E0',
blue2: '#3A7AED',
blue3: '#5A9DF5',
cyan1: '#00B0AA',
cyan2: '#33D6C2',
cyan3: '#66E9DA',
green1: '#1E8449',
green2: '#2ECC71',
green3: '#58D68D',
yellow1: '#F1C40F',
yellow2: '#F7DC6F',
yellow3: '#F9E79F',
orange1: '#D35400',
orange2: '#E67E22',
orange3: '#F5B041',
red1: '#C0392B',
red2: '#E74C3C',
red3: '#EC7063',
pink1: '#D81B60',
pink2: '#E91E63',
pink3: '#F06292',
purple1: '#8E44AD',
purple2: '#9B59B6',
purple3: '#BB8FCE',
teal1: '#009688',
teal2: '#1ABC9C',
teal3: '#48C9B0',
lime1: '#A3E635',
lime2: '#B9F18D',
lime3: '#D4FFB0',
gold1: '#F39C12',
gold2: '#F1C40F',
gold3: '#F7DC6F',
coral1: '#E67E22',
coral2: '#F39C12',
coral3: '#F5B041',
crimson1: '#C0392B',
crimson2: '#E74C3C',
crimson3: '#EC7063',
violet1: '#8E44AD',
violet2: '#9B59B6',
violet3: '#BB8FCE',
aqua1: '#00BFFF',
aqua2: '#1E90FF',
aqua3: '#63B8FF',
forest1: '#27AE60',
forest2: '#2ECC71',
forest3: '#58D68D',
blush1: '#FF6F91',
blush2: '#FF85A2',
blush3: '#FFA0B3',
lavender1: '#9B59B6',
lavender2: '#AF7AC5',
lavender3: '#C39BD3',
tomato1: '#E74C3C',
tomato2: '#EC7063',
tomato3: '#F1948A',
salmon1: '#FF6B6B',
salmon2: '#FF8787',
salmon3: '#FFA1A1',
mustard1: '#F1C40F',
mustard2: '#F7DC6F',
mustard3: '#F9E79F',
teal4: '#1ABC9C',
teal5: '#48C9B0',
teal6: '#76D7C4',
magenta1: '#D6336C',
magenta2: '#E84393',
magenta3: '#F06292',
violet4: '#7D3C98',
violet5: '#8E44AD',
violet6: '#9B59B6',
green4: '#229954',
green5: '#27AE60',
green6: '#52BE80',
blue4: '#2874A6',
blue5: '#2E86C1',
blue6: '#3498DB',
red4: '#C0392B',
red5: '#E74C3C',
red6: '#EC7063',
orange4: '#D35400',
orange5: '#E67E22',
orange6: '#EB984E',
pink4: '#C2185B',
pink5: '#D81B60',
pink6: '#E91E63',
gold4: '#B7950B',
gold5: '#F1C40F',
gold6: '#F4D03F',
},
lightModeColor: {
radicalRed: '#FF1A66',
dodgerBlueDark: '#0C6EED',
steelgrey: '#2f4b7c',
steelpurple: '#665191',
steelindigo: '#a05195',
steelpink: '#d45087',
steelcoral: '#f95d6a',
steelorange: '#ff7c43',
steelgold: '#ffa600',
steelrust: '#de425b',
steelgreen: '#41967e',
mediumOrchidDark: '#C326FD',
seaBuckthornDark: '#E66E05',
seaGreen: '#219653',
turquoiseBlueDark: '#0099CC',
silverDark: '#757575',
outrageousOrangeDark: '#F9521A',
roseBudDark: '#EB6437',
deepSkyBlueDark: '#0595BD',
royalBlue: '#3366E6',
avocadoDark: '#8E8E29',
mintGreenDark: '#00C700',
chestnut: '#B34D4D',
limaDark: '#6E9900',
olive: '#809900',
beautyBushDark: '#E25555',
danube: '#6680B3',
oliveDrab: '#66991A',
lavenderRoseDark: '#F024BD',
electricLimeDark: '#84A800',
robin: '#3F5ECC',
harleyOrange: '#E6331A',
gladeGreen: '#66994D',
hemlock: '#66664D',
vidaLoca: '#4D8000',
rust: '#B33300',
red: '#FF0000', // Adding more colors, we need to get better colors from design team
blue: '#0000FF',
green: '#00FF00',
purple: '#800080',
magentaDark: '#EB00EB',
pinkDark: '#FF3D5E',
brown: '#A52A2A',
teal: '#008080',
limeDark: '#07A207',
maroon: '#800000',
navy: '#000080',
gray: '#808080',
skyBlueDark: '#0CA7E4',
indigo: '#4B0082',
slateGray: '#708090',
chocolate: '#D2691E',
tomato: '#FF6347',
steelBlue: '#4682B4',
peruDark: '#D16E0A',
darkOliveGreen: '#556B2F',
indianRed: '#CD5C5C',
mediumSlateBlue: '#7B68EE',
rosyBrownDark: '#CB4848',
darkSlateGray: '#2F4F4F',
fuchsia: '#FF0AFF',
salmonDark: '#FF432E',
darkSalmonDark: '#D26541',
paleVioletRedDark: '#E83089',
mediumPurple: '#9370DB',
darkOrchid: '#9932CC',
mediumSeaGreenDark: '#109E50',
lightCoralDark: '#F85959',
gold: '#FFD700',
sandyBrownDark: '#D97117',
darkKhakiDark: '#99900A',
cornflowerBlueDark: '#3371E6',
mediumVioletRed: '#C71585',
paleGreenDark: '#0D910D',
radicalRed: '#D81B60',
dodgerBlueDark: '#1E5BD9',
steelgrey: '#344B6B',
steelpurple: '#5E548E',
steelindigo: '#8E4A7C',
steelpink: '#B63A6F',
steelcoral: '#E14B5A',
steelorange: '#E76F2F',
steelgold: '#E09B00',
steelrust: '#C93A50',
steelgreen: '#2F7D69',
mediumOrchidDark: '#8E24AA',
seaBuckthornDark: '#C75A00',
seaGreen: '#1E7F5A',
turquoiseBlueDark: '#007EA7',
silverDark: '#5F5F5F',
outrageousOrangeDark: '#E64A19',
roseBudDark: '#D84315',
deepSkyBlueDark: '#0277BD',
royalBlue: '#2A4FDB',
avocadoDark: '#6B6B1E',
mintGreenDark: '#2E9E55',
chestnut: '#8B3A3A',
limaDark: '#5C7F00',
olive: '#6E7F00',
beautyBushDark: '#C93C3C',
danube: '#4F6FB3',
oliveDrab: '#4F7F1A',
lavenderRoseDark: '#B0178F',
electricLimeDark: '#6B8F00',
robin: '#2F4FCC',
harleyOrange: '#CC2E12',
gladeGreen: '#4F7F46',
hemlock: '#5C5C45',
vidaLoca: '#3D6B00',
rust: '#993300',
red: '#C62828',
blue: '#1A237E',
green: '#1B7F3A',
purple: '#6A1B9A',
magentaDark: '#B000B5',
pinkDark: '#C2185B',
brown: '#7A3A1E',
teal: '#006D6F',
limeDark: '#4C8C2B',
maroon: '#6D1B1B',
navy: '#0D1B5E',
gray: '#616161',
skyBlueDark: '#0288D1',
indigo: '#303F9F',
slateGray: '#556B7C',
chocolate: '#9C4A1A',
tomato: '#E53935',
steelBlue: '#3A6EA5',
peruDark: '#B35E00',
darkOliveGreen: '#445B1F',
indianRed: '#B04040',
mediumSlateBlue: '#5C6BC0',
rosyBrownDark: '#A94444',
darkSlateGray: '#2E4A4A',
fuchsia: '#C511C5',
salmonDark: '#E64A3C',
darkSalmonDark: '#C85A3A',
paleVioletRedDark: '#C2186A',
mediumPurple: '#7E57C2',
darkOrchid: '#7B1FA2',
mediumSeaGreenDark: '#2E8B57',
lightCoralDark: '#E57373',
gold: '#D4AF37',
sandyBrownDark: '#C76A15',
darkKhakiDark: '#8A7F00',
cornflowerBlueDark: '#355FCC',
mediumVioletRed: '#AD1457',
paleGreenDark: '#2E7D32',
},
errorColor: '#d32f2f',
royalGrey: '#888888',

View File

@@ -0,0 +1,53 @@
import { memo, useMemo } from 'react';
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import { VariableItemProps } from './VariableItem';
type CustomVariableInputProps = Pick<
VariableItemProps,
'variableData' | 'onValueUpdate'
>;
function CustomVariableInput({
variableData,
onValueUpdate,
}: CustomVariableInputProps): JSX.Element {
const optionsData: (string | number | boolean)[] = useMemo(() => {
return sortValues(
commaValuesParser(variableData.customValue || ''),
variableData.sort,
) as (string | number | boolean)[];
}, [variableData.customValue, variableData.sort]);
const {
value,
defaultValue,
enableSelectAll,
onChange,
onDropdownVisibleChange,
handleClear,
} = useDashboardVariableSelectHelper({
variableData,
optionsData,
onValueUpdate,
});
return (
<SelectVariableInput
variableId={variableData.id}
options={optionsData}
value={value}
onChange={onChange}
onDropdownVisibleChange={onDropdownVisibleChange}
onClear={handleClear}
enableSelectAll={enableSelectAll}
defaultValue={defaultValue}
isMultiSelect={variableData.multiSelect}
/>
);
}
export default memo(CustomVariableInput);

View File

@@ -25,12 +25,6 @@
}
}
&.focused {
.variable-value {
outline: 1px solid var(--bg-robin-400);
}
}
.variable-value {
display: flex;
min-width: 120px;
@@ -48,6 +42,11 @@
font-style: normal;
font-weight: 400;
line-height: 16px; /* 133.333% */
&:hover,
&:focus-within {
outline: 1px solid var(--bg-robin-400);
}
}
.variable-select {
@@ -99,12 +98,6 @@
.lightMode {
.variable-item {
&.focused {
.variable-value {
border: 1px solid var(--bg-robin-400);
}
}
.variable-name {
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-100);
@@ -115,6 +108,11 @@
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-100);
color: var(--bg-ink-400);
&:hover,
&:focus-within {
outline: 1px solid var(--bg-robin-400);
}
}
}
}
@@ -124,3 +122,9 @@
padding: 4px 12px;
font-size: 12px;
}
.dashboard-variables-selection-container {
display: flex;
flex-wrap: wrap;
gap: 12px;
}

View File

@@ -1,4 +1,4 @@
import { memo, useEffect } from 'react';
import { memo, useCallback, useEffect, useMemo } from 'react';
import { useSelector } from 'react-redux';
import { Row } from 'antd';
import { ALL_SELECTED_VALUE } from 'components/NewSelect/utils';
@@ -7,7 +7,6 @@ import {
useDashboardVariablesSelector,
} from 'hooks/dashboard/useDashboardVariables';
import useVariablesFromUrl from 'hooks/dashboard/useVariablesFromUrl';
import { isEmpty } from 'lodash-es';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { initializeDefaultVariables } from 'providers/Dashboard/initializeDefaultVariables';
import { AppState } from 'store/reducers';
@@ -22,7 +21,6 @@ import './DashboardVariableSelection.styles.scss';
function DashboardVariableSelection(): JSX.Element | null {
const {
selectedDashboard,
setSelectedDashboard,
updateLocalStorageDashboardVariables,
variablesToGetUpdated,
@@ -52,34 +50,36 @@ function DashboardVariableSelection(): JSX.Element | null {
);
}, [getUrlVariables, updateUrlVariable, dashboardVariables]);
// this handles the case where the dependency order changes i.e. variable list updated via creation or deletion etc. and we need to refetch the variables
// also trigger when the global time changes
useEffect(
() => {
if (!isEmpty(dependencyData?.order)) {
setVariablesToGetUpdated(dependencyData?.order || []);
}
},
// eslint-disable-next-line react-hooks/exhaustive-deps
[JSON.stringify(dependencyData?.order), minTime, maxTime],
// Memoize the order key to avoid unnecessary triggers
const dependencyOrderKey = useMemo(
() => dependencyData?.order?.join(',') ?? '',
[dependencyData?.order],
);
// Trigger refetch when dependency order changes or global time changes
useEffect(() => {
if (dependencyData?.order && dependencyData.order.length > 0) {
setVariablesToGetUpdated(dependencyData?.order || []);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [dependencyOrderKey, minTime, maxTime]);
// Performance optimization: For dynamic variables with allSelected=true, we don't store
// individual values in localStorage since we can always derive them from available options.
// This makes localStorage much lighter and more efficient.
const onValueUpdate = (
name: string,
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
haveCustomValuesSelected?: boolean,
// eslint-disable-next-line sonarjs/cognitive-complexity
): void => {
if (id) {
const onValueUpdate = useCallback(
(
name: string,
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
haveCustomValuesSelected?: boolean,
// eslint-disable-next-line sonarjs/cognitive-complexity
): void => {
// For dynamic variables, only store in localStorage when NOT allSelected
// This makes localStorage much lighter by avoiding storing all individual values
const variable = dashboardVariables?.[id] || dashboardVariables?.[name];
const isDynamic = variable?.type === 'DYNAMIC';
const variable = dashboardVariables[id] || dashboardVariables[name];
const isDynamic = variable.type === 'DYNAMIC';
updateLocalStorageDashboardVariables(name, value, allSelected, isDynamic);
if (allSelected) {
@@ -88,41 +88,39 @@ function DashboardVariableSelection(): JSX.Element | null {
updateUrlVariable(name || id, value);
}
if (selectedDashboard) {
setSelectedDashboard((prev) => {
if (prev) {
const oldVariables = prev?.data.variables;
// this is added to handle case where we have two different
// schemas for variable response
if (oldVariables?.[id]) {
oldVariables[id] = {
...oldVariables[id],
selectedValue: value,
allSelected,
haveCustomValuesSelected,
};
}
if (oldVariables?.[name]) {
oldVariables[name] = {
...oldVariables[name],
selectedValue: value,
allSelected,
haveCustomValuesSelected,
};
}
return {
...prev,
data: {
...prev?.data,
variables: {
...oldVariables,
},
},
setSelectedDashboard((prev) => {
if (prev) {
const oldVariables = { ...prev?.data.variables };
// this is added to handle case where we have two different
// schemas for variable response
if (oldVariables?.[id]) {
oldVariables[id] = {
...oldVariables[id],
selectedValue: value,
allSelected,
haveCustomValuesSelected,
};
}
return prev;
});
}
if (oldVariables?.[name]) {
oldVariables[name] = {
...oldVariables[name],
selectedValue: value,
allSelected,
haveCustomValuesSelected,
};
}
return {
...prev,
data: {
...prev?.data,
variables: {
...oldVariables,
},
},
};
}
return prev;
});
if (dependencyData) {
const updatedVariables: string[] = [];
@@ -138,11 +136,20 @@ function DashboardVariableSelection(): JSX.Element | null {
} else {
setVariablesToGetUpdated((prev) => prev.filter((v) => v !== name));
}
}
};
},
[
// This can be removed
dashboardVariables,
updateLocalStorageDashboardVariables,
dependencyData,
updateUrlVariable,
setSelectedDashboard,
setVariablesToGetUpdated,
],
);
return (
<Row style={{ display: 'flex', gap: '12px' }}>
<Row className="dashboard-variables-selection-container">
{sortedVariablesArray.map((variable) => {
const key = `${variable.name}${variable.id}${variable.order}`;
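
The hunk above swaps a `JSON.stringify` dependency for a memoized join of the order array, so the refetch effect fires only when the order's contents actually change. The same pattern in isolation, with an illustrative hook name and signature:

```typescript
import { useEffect, useMemo } from 'react';

// Derive a stable string key from the array so the effect re-runs only when
// the contents change, not when a new array instance with identical contents
// is created on each render.
export function useOnOrderChange(
	order: string[] | undefined,
	onChange: () => void,
): void {
	const orderKey = useMemo(() => order?.join(',') ?? '', [order]);

	useEffect(() => {
		if (order && order.length > 0) {
			onChange();
		}
		// Intentionally keyed on the joined string, mirroring the diff above.
		// eslint-disable-next-line react-hooks/exhaustive-deps
	}, [orderKey]);
}
```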

View File

@@ -23,9 +23,9 @@ import { SelectItemStyle } from './styles';
import {
areArraysEqual,
getOptionsForDynamicVariable,
getSelectValue,
uniqueValues,
} from './util';
import { getSelectValue } from './VariableItem';
import './DashboardVariableSelection.styles.scss';

View File

@@ -0,0 +1,229 @@
import { memo, useCallback, useState } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
import { isArray, isString } from 'lodash-es';
import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import { AppState } from 'store/reducers';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { VariableResponseProps } from 'types/api/dashboard/variables/query';
import { GlobalReducer } from 'types/reducer/globalTime';
import { variablePropsToPayloadVariables } from '../utils';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import { areArraysEqual, checkAPIInvocation } from './util';
interface QueryVariableInputProps {
variableData: IDashboardVariable;
existingVariables: Record<string, IDashboardVariable>;
onValueUpdate: (
name: string,
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
) => void;
variablesToGetUpdated: string[];
setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
dependencyData: IDependencyData | null;
}
function QueryVariableInput({
variableData,
existingVariables,
variablesToGetUpdated,
setVariablesToGetUpdated,
dependencyData,
onValueUpdate,
}: QueryVariableInputProps): JSX.Element {
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
[],
);
const [errorMessage, setErrorMessage] = useState<null | string>(null);
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
const {
tempSelection,
setTempSelection,
value,
defaultValue,
enableSelectAll,
onChange,
onDropdownVisibleChange,
handleClear,
} = useDashboardVariableSelectHelper({
variableData,
optionsData,
onValueUpdate,
});
const validVariableUpdate = (): boolean => {
if (!variableData.name) {
return false;
}
return Boolean(
variablesToGetUpdated.length &&
variablesToGetUpdated[0] === variableData.name,
);
};
// eslint-disable-next-line sonarjs/cognitive-complexity
const getOptions = (variablesRes: VariableResponseProps | null): void => {
try {
setErrorMessage(null);
if (
variablesRes?.variableValues &&
Array.isArray(variablesRes?.variableValues)
) {
const newOptionsData = sortValues(
variablesRes?.variableValues,
variableData.sort,
);
const oldOptionsData = sortValues(optionsData, variableData.sort) as never;
if (!areArraysEqual(newOptionsData, oldOptionsData)) {
let valueNotInList = false;
if (isArray(variableData.selectedValue)) {
variableData.selectedValue.forEach((val) => {
if (!newOptionsData.includes(val)) {
valueNotInList = true;
}
});
} else if (
isString(variableData.selectedValue) &&
!newOptionsData.includes(variableData.selectedValue)
) {
valueNotInList = true;
}
// variablesData.allSelected is added for the case where on change of options we need to update the
// local storage
if (
variableData.name &&
(validVariableUpdate() || valueNotInList || variableData.allSelected)
) {
if (
variableData.allSelected &&
variableData.multiSelect &&
variableData.showALLOption
) {
onValueUpdate(variableData.name, variableData.id, newOptionsData, true);
// Update tempSelection to maintain ALL state when dropdown is open
if (tempSelection !== undefined) {
setTempSelection(newOptionsData.map((option) => option.toString()));
}
} else {
const value = variableData.selectedValue;
let allSelected = false;
if (variableData.multiSelect) {
const { selectedValue } = variableData;
allSelected =
newOptionsData.length > 0 &&
Array.isArray(selectedValue) &&
newOptionsData.every((option) => selectedValue.includes(option));
}
if (variableData.name && variableData.id) {
onValueUpdate(variableData.name, variableData.id, value, allSelected);
}
}
}
setOptionsData(newOptionsData);
} else {
setVariablesToGetUpdated((prev) =>
prev.filter((name) => name !== variableData.name),
);
}
}
} catch (e) {
console.error(e);
}
};
const { isLoading, refetch } = useQuery(
[
REACT_QUERY_KEY.DASHBOARD_BY_ID,
variableData.name || '',
`${minTime}`,
`${maxTime}`,
JSON.stringify(dependencyData?.order),
],
{
enabled:
variableData &&
variableData.type === 'QUERY' &&
checkAPIInvocation(
variablesToGetUpdated,
variableData,
dependencyData?.parentDependencyGraph,
),
queryFn: () =>
dashboardVariablesQuery({
query: variableData.queryValue || '',
variables: variablePropsToPayloadVariables(existingVariables),
}),
refetchOnWindowFocus: false,
onSuccess: (response) => {
getOptions(response.payload);
setVariablesToGetUpdated((prev) =>
prev.filter((v) => v !== variableData.name),
);
},
onError: (error: {
details: {
error: string;
};
}) => {
const { details } = error;
if (details.error) {
let message = details.error;
if ((details.error ?? '').toString().includes('Syntax error:')) {
message =
'Please make sure query is valid and dependent variables are selected';
}
setErrorMessage(message);
}
setVariablesToGetUpdated((prev) =>
prev.filter((v) => v !== variableData.name),
);
},
},
);
const handleRetry = useCallback((): void => {
setErrorMessage(null);
refetch();
}, [refetch]);
return (
<SelectVariableInput
variableId={variableData.id}
options={optionsData}
value={value}
onChange={onChange}
onDropdownVisibleChange={onDropdownVisibleChange}
onClear={handleClear}
enableSelectAll={enableSelectAll}
defaultValue={defaultValue}
isMultiSelect={variableData.multiSelect}
// query variable specific, API related props
loading={isLoading}
errorMessage={errorMessage}
onRetry={handleRetry}
/>
);
}
export default memo(QueryVariableInput);

View File

@@ -0,0 +1,134 @@
import { memo, useMemo } from 'react';
import { orange } from '@ant-design/colors';
import { WarningOutlined } from '@ant-design/icons';
import { Popover, Tooltip, Typography } from 'antd';
import { CustomMultiSelect, CustomSelect } from 'components/NewSelect';
import { popupContainer } from 'utils/selectPopupContainer';
import { ALL_SELECT_VALUE } from '../utils';
import { SelectItemStyle } from './styles';
const errorIconStyle = { margin: '0 0.5rem' };
interface SelectVariableInputProps {
variableId: string;
options: (string | number | boolean)[];
value: string | string[] | undefined;
enableSelectAll: boolean;
isMultiSelect: boolean;
onChange: (value: string | string[]) => void;
onClear: () => void;
defaultValue?: string | string[];
onDropdownVisibleChange?: (visible: boolean) => void;
loading?: boolean;
errorMessage?: string | null;
onRetry?: () => void;
}
const MAX_TAG_DISPLAY_VALUES = 10;
function maxTagPlaceholder(
omittedValues: { label?: React.ReactNode; value?: string | number }[],
): JSX.Element {
const valuesToShow = omittedValues.slice(0, MAX_TAG_DISPLAY_VALUES);
const hasMore = omittedValues.length > MAX_TAG_DISPLAY_VALUES;
const tooltipText =
valuesToShow.map(({ value: v }) => v ?? '').join(', ') +
(hasMore ? ` + ${omittedValues.length - MAX_TAG_DISPLAY_VALUES} more` : '');
return (
<Tooltip title={tooltipText}>
<span>+ {omittedValues.length} </span>
</Tooltip>
);
}
function SelectVariableInput({
variableId,
options,
value,
onChange,
onDropdownVisibleChange,
onClear,
loading,
errorMessage,
onRetry,
enableSelectAll,
isMultiSelect,
defaultValue,
}: SelectVariableInputProps): JSX.Element {
const selectOptions = useMemo(
() =>
options.map((option) => ({
label: option.toString(),
value: option.toString(),
})),
[options],
);
const commonProps = useMemo(
() => ({
// main props
key: variableId,
value,
defaultValue,
// setup props
placeholder: 'Select value',
className: 'variable-select',
popupClassName: 'dropdown-styles',
getPopupContainer: popupContainer,
style: SelectItemStyle,
showSearch: true,
bordered: false,
// dynamic props
'data-testid': 'variable-select',
onChange,
loading,
options: selectOptions,
errorMessage,
onRetry,
}),
[
variableId,
defaultValue,
onChange,
loading,
selectOptions,
value,
errorMessage,
onRetry,
],
);
return (
<>
{isMultiSelect ? (
<CustomMultiSelect
{...commonProps}
placement="bottomLeft"
maxTagCount={2}
onDropdownVisibleChange={onDropdownVisibleChange}
maxTagPlaceholder={maxTagPlaceholder}
onClear={onClear}
enableAllSelection={enableSelectAll}
maxTagTextLength={30}
allowClear={value !== ALL_SELECT_VALUE && value !== 'ALL'}
/>
) : (
<CustomSelect {...commonProps} />
)}
{errorMessage && (
<span style={errorIconStyle}>
<Popover placement="top" content={<Typography>{errorMessage}</Typography>}>
<WarningOutlined style={{ color: orange[5] }} />
</Popover>
</span>
)}
</>
);
}
export default memo(SelectVariableInput);

View File

@@ -0,0 +1,85 @@
import { memo, useCallback, useRef, useState } from 'react';
import { Input, InputRef } from 'antd';
import { VariableItemProps } from './VariableItem';
type TextboxVariableInputProps = Pick<
VariableItemProps,
'variableData' | 'onValueUpdate'
>;
function TextboxVariableInput({
variableData,
onValueUpdate,
}: TextboxVariableInputProps): JSX.Element {
const handleChange = useCallback(
(inputValue: string | string[]): void => {
if (inputValue === variableData.selectedValue) {
return;
}
if (variableData.name) {
onValueUpdate(variableData.name, variableData.id, inputValue, false);
}
},
[
onValueUpdate,
variableData.id,
variableData.name,
variableData.selectedValue,
],
);
const textboxInputRef = useRef<InputRef>(null);
const [textboxInputValue, setTextboxInputValue] = useState<string>(
(variableData.selectedValue?.toString() ||
variableData.defaultValue?.toString()) ??
'',
);
const handleInputOnChange = useCallback(
(event: React.ChangeEvent<HTMLInputElement>) => {
setTextboxInputValue(event.target.value);
},
[setTextboxInputValue],
);
const handleInputOnBlur = useCallback(
(event: React.FocusEvent<HTMLInputElement>): void => {
const value = event.target.value.trim();
// If empty, reset to default value
if (!value && variableData.defaultValue) {
setTextboxInputValue(variableData.defaultValue.toString());
handleChange(variableData.defaultValue.toString());
} else {
handleChange(value);
}
},
[handleChange, variableData.defaultValue],
);
const handleInputOnKeyDown = useCallback(
(event: React.KeyboardEvent<HTMLInputElement>): void => {
if (event.key === 'Enter') {
textboxInputRef.current?.blur();
}
},
[],
);
return (
<Input
key={variableData.id}
ref={textboxInputRef}
placeholder="Enter value"
data-testid={`variable-textbox-${variableData.id}`}
bordered={false}
value={textboxInputValue}
title={textboxInputValue}
onChange={handleInputOnChange}
onBlur={handleInputOnBlur}
onKeyDown={handleInputOnKeyDown}
/>
);
}
export default memo(TextboxVariableInput);
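
The extracted components each take the same `variableData`/`onValueUpdate` pair, with `QueryVariableInput` additionally needing the dependency and update-queue props. A hedged sketch of dispatching by variable type; the refactored `VariableItem` in this PR may wire this differently, and the `'TEXTBOX'` literal is assumed rather than read from this diff.

```typescript
import CustomVariableInput from './CustomVariableInput';
import TextboxVariableInput from './TextboxVariableInput';
import { VariableItemProps } from './VariableItem';

type BasicInputProps = Pick<VariableItemProps, 'variableData' | 'onValueUpdate'>;

// QUERY variables go through QueryVariableInput instead, since they also need
// existingVariables, dependencyData and the variables-to-update queue.
function BasicVariableInput({
	variableData,
	onValueUpdate,
}: BasicInputProps): JSX.Element {
	if (variableData.type === 'TEXTBOX') {
		return (
			<TextboxVariableInput
				variableData={variableData}
				onValueUpdate={onValueUpdate}
			/>
		);
	}
	return (
		<CustomVariableInput
			variableData={variableData}
			onValueUpdate={onValueUpdate}
		/>
	);
}

export default BasicVariableInput;
```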

View File

@@ -1,35 +1,16 @@
/* eslint-disable sonarjs/cognitive-complexity */
/* eslint-disable jsx-a11y/click-events-have-key-events */
/* eslint-disable jsx-a11y/no-static-element-interactions */
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable react/jsx-props-no-spreading */
/* eslint-disable no-nested-ternary */
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import { orange } from '@ant-design/colors';
import { InfoCircleOutlined, WarningOutlined } from '@ant-design/icons';
import { Input, InputRef, Popover, Tooltip, Typography } from 'antd';
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
import { CustomMultiSelect, CustomSelect } from 'components/NewSelect';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
import { debounce, isArray, isEmpty, isString } from 'lodash-es';
import { memo } from 'react';
import { InfoCircleOutlined } from '@ant-design/icons';
import { Tooltip, Typography } from 'antd';
import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import { AppState } from 'store/reducers';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { VariableResponseProps } from 'types/api/dashboard/variables/query';
import { GlobalReducer } from 'types/reducer/globalTime';
import { popupContainer } from 'utils/selectPopupContainer';
import { ALL_SELECT_VALUE, variablePropsToPayloadVariables } from '../utils';
import { SelectItemStyle } from './styles';
import { areArraysEqual, checkAPIInvocation } from './util';
import CustomVariableInput from './CustomVariableInput';
import QueryVariableInput from './QueryVariableInput';
import TextboxVariableInput from './TextboxVariableInput';
import './DashboardVariableSelection.styles.scss';
interface VariableItemProps {
export interface VariableItemProps {
variableData: IDashboardVariable;
existingVariables: Record<string, IDashboardVariable>;
onValueUpdate: (
@@ -43,488 +24,49 @@ interface VariableItemProps {
dependencyData: IDependencyData | null;
}
export const getSelectValue = (
selectedValue: IDashboardVariable['selectedValue'],
variableData: IDashboardVariable,
): string | string[] | undefined => {
if (Array.isArray(selectedValue)) {
if (!variableData.multiSelect && selectedValue.length === 1) {
return selectedValue[0]?.toString();
}
return selectedValue.map((item) => item.toString());
}
return selectedValue?.toString();
};
// eslint-disable-next-line sonarjs/cognitive-complexity
function VariableItem({
variableData,
existingVariables,
onValueUpdate,
existingVariables,
variablesToGetUpdated,
setVariablesToGetUpdated,
dependencyData,
}: VariableItemProps): JSX.Element {
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
[],
);
const [tempSelection, setTempSelection] = useState<
string | string[] | undefined
>(undefined);
// Local state for textbox input to ensure smooth editing experience
const [textboxInputValue, setTextboxInputValue] = useState<string>(
(variableData.selectedValue?.toString() ||
variableData.defaultValue?.toString()) ??
'',
);
const [isTextboxFocused, setIsTextboxFocused] = useState<boolean>(false);
const textboxInputRef = useRef<InputRef>(null);
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
const validVariableUpdate = (): boolean => {
if (!variableData.name) {
return false;
}
// variableData.name is present as the top element or next in the queue - variablesToGetUpdated
return Boolean(
variablesToGetUpdated.length &&
variablesToGetUpdated[0] === variableData.name,
);
};
const [errorMessage, setErrorMessage] = useState<null | string>(null);
// eslint-disable-next-line sonarjs/cognitive-complexity
const getOptions = (variablesRes: VariableResponseProps | null): void => {
if (variablesRes && variableData.type === 'QUERY') {
try {
setErrorMessage(null);
if (
variablesRes?.variableValues &&
Array.isArray(variablesRes?.variableValues)
) {
const newOptionsData = sortValues(
variablesRes?.variableValues,
variableData.sort,
);
const oldOptionsData = sortValues(optionsData, variableData.sort) as never;
if (!areArraysEqual(newOptionsData, oldOptionsData)) {
/* eslint-disable no-useless-escape */
let valueNotInList = false;
if (isArray(variableData.selectedValue)) {
variableData.selectedValue.forEach((val) => {
const isUsed = newOptionsData.includes(val);
if (!isUsed) {
valueNotInList = true;
}
});
} else if (isString(variableData.selectedValue)) {
const isUsed = newOptionsData.includes(variableData.selectedValue);
if (!isUsed) {
valueNotInList = true;
}
}
// variablesData.allSelected is added for the case where on change of options we need to update the
// local storage
if (
variableData.type === 'QUERY' &&
variableData.name &&
(validVariableUpdate() || valueNotInList || variableData.allSelected)
) {
if (
variableData.allSelected &&
variableData.multiSelect &&
variableData.showALLOption
) {
onValueUpdate(variableData.name, variableData.id, newOptionsData, true);
// Update tempSelection to maintain ALL state when dropdown is open
if (tempSelection !== undefined) {
setTempSelection(newOptionsData.map((option) => option.toString()));
}
} else {
const value = variableData.selectedValue;
let allSelected = false;
if (variableData.multiSelect) {
const { selectedValue } = variableData;
allSelected =
newOptionsData.length > 0 &&
Array.isArray(selectedValue) &&
newOptionsData.every((option) => selectedValue.includes(option));
}
if (variableData && variableData?.name && variableData?.id) {
onValueUpdate(variableData.name, variableData.id, value, allSelected);
}
}
}
setOptionsData(newOptionsData);
} else {
setVariablesToGetUpdated((prev) =>
prev.filter((name) => name !== variableData.name),
);
}
}
} catch (e) {
console.error(e);
}
} else if (variableData.type === 'CUSTOM') {
const optionsData = sortValues(
commaValuesParser(variableData.customValue || ''),
variableData.sort,
) as never;
setOptionsData(optionsData);
}
};
const { isLoading, refetch } = useQuery(
[
REACT_QUERY_KEY.DASHBOARD_BY_ID,
variableData.name || '',
`${minTime}`,
`${maxTime}`,
JSON.stringify(dependencyData?.order),
],
{
enabled:
variableData &&
variableData.type === 'QUERY' &&
checkAPIInvocation(
variablesToGetUpdated,
variableData,
dependencyData?.parentDependencyGraph,
),
queryFn: () =>
dashboardVariablesQuery({
query: variableData.queryValue || '',
variables: variablePropsToPayloadVariables(existingVariables),
}),
refetchOnWindowFocus: false,
onSuccess: (response) => {
getOptions(response.payload);
setVariablesToGetUpdated((prev) =>
prev.filter((v) => v !== variableData.name),
);
},
onError: (error: {
details: {
error: string;
};
}) => {
const { details } = error;
if (details.error) {
let message = details.error;
if ((details.error ?? '').toString().includes('Syntax error:')) {
message =
'Please make sure query is valid and dependent variables are selected';
}
setErrorMessage(message);
}
setVariablesToGetUpdated((prev) =>
prev.filter((v) => v !== variableData.name),
);
},
},
);
const handleChange = useCallback(
(inputValue: string | string[]): void => {
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
if (
value === variableData.selectedValue ||
(Array.isArray(value) &&
Array.isArray(variableData.selectedValue) &&
areArraysEqual(value, variableData.selectedValue))
) {
return;
}
if (variableData.name) {
// Check if ALL is effectively selected by comparing with available options
const isAllSelected =
Array.isArray(value) &&
value.length > 0 &&
optionsData.every((option) => value.includes(option.toString()));
if (isAllSelected && variableData.showALLOption) {
// For ALL selection, pass optionsData as the value and set allSelected to true
onValueUpdate(variableData.name, variableData.id, optionsData, true);
} else {
onValueUpdate(variableData.name, variableData.id, value, false);
}
}
},
[
variableData.multiSelect,
variableData.selectedValue,
variableData.name,
variableData.id,
onValueUpdate,
optionsData,
variableData.showALLOption,
],
);
// Add a handler for tracking temporary selection changes
const handleTempChange = (inputValue: string | string[]): void => {
// Store the selection in temporary state while dropdown is open
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
setTempSelection(value);
};
// Handle dropdown visibility changes
const handleDropdownVisibleChange = (visible: boolean): void => {
// Initialize temp selection when opening dropdown
if (visible) {
setTempSelection(getSelectValue(variableData.selectedValue, variableData));
}
// Apply changes when closing dropdown
else if (!visible && tempSelection !== undefined) {
// Call handleChange with the temporarily stored selection
handleChange(tempSelection);
setTempSelection(undefined);
}
};
// do not debounce the above function as we do not need debounce in select variables
const debouncedHandleChange = debounce(handleChange, 500);
const { selectedValue } = variableData;
const selectedValueStringified = useMemo(
() => getSelectValue(selectedValue, variableData),
[selectedValue, variableData],
);
const enableSelectAll = variableData.multiSelect && variableData.showALLOption;
const selectValue =
variableData.allSelected && enableSelectAll
? 'ALL'
: selectedValueStringified;
// Apply default value on first render if no selection exists
// eslint-disable-next-line sonarjs/cognitive-complexity
const finalSelectedValues = useMemo(() => {
if (variableData.multiSelect) {
let value = tempSelection || selectedValue;
if (isEmpty(value)) {
if (variableData.showALLOption) {
if (variableData.defaultValue) {
value = variableData.defaultValue;
} else {
value = optionsData;
}
} else if (variableData.defaultValue) {
value = variableData.defaultValue;
} else {
value = optionsData?.[0];
}
}
return value;
}
if (isEmpty(selectedValue)) {
if (variableData.defaultValue) {
return variableData.defaultValue;
}
return optionsData[0]?.toString();
}
return selectedValue;
}, [
variableData.multiSelect,
variableData.showALLOption,
variableData.defaultValue,
selectedValue,
tempSelection,
optionsData,
]);
useEffect(() => {
if (
(variableData.multiSelect && !(tempSelection || selectValue)) ||
isEmpty(selectValue)
) {
handleChange(finalSelectedValues as string[] | string);
}
}, [
finalSelectedValues,
handleChange,
selectValue,
tempSelection,
variableData.multiSelect,
]);
useEffect(() => {
// Fetch options for CUSTOM Type
if (variableData.type === 'CUSTOM') {
getOptions(null);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [variableData.type, variableData.customValue]);
const { name, description, type: variableType } = variableData;
return (
<div className={`variable-item${isTextboxFocused ? ' focused' : ''}`}>
<div className="variable-item">
<Typography.Text className="variable-name" ellipsis>
${variableData.name}
{variableData.description && (
<Tooltip title={variableData.description}>
${name}
{description && (
<Tooltip title={description}>
<InfoCircleOutlined className="info-icon" />
</Tooltip>
)}
</Typography.Text>
<div className="variable-value">
{variableData.type === 'TEXTBOX' ? (
<Input
ref={textboxInputRef}
placeholder="Enter value"
data-testid={`variable-textbox-${variableData.id}`}
bordered={false}
value={textboxInputValue}
title={textboxInputValue}
onChange={(e): void => {
setTextboxInputValue(e.target.value);
}}
onFocus={(): void => {
setIsTextboxFocused(true);
}}
onBlur={(e): void => {
setIsTextboxFocused(false);
const value = e.target.value.trim();
// If empty, reset to default value
if (!value && variableData.defaultValue) {
setTextboxInputValue(variableData.defaultValue.toString());
debouncedHandleChange(variableData.defaultValue.toString());
} else {
debouncedHandleChange(value);
}
}}
onKeyDown={(e): void => {
if (e.key === 'Enter') {
const value = textboxInputValue.trim();
if (!value && variableData.defaultValue) {
setTextboxInputValue(variableData.defaultValue.toString());
debouncedHandleChange(variableData.defaultValue.toString());
} else {
debouncedHandleChange(value);
}
textboxInputRef.current?.blur();
}
}}
{variableType === 'TEXTBOX' && (
<TextboxVariableInput
variableData={variableData}
onValueUpdate={onValueUpdate}
/>
) : (
optionsData &&
(variableData.multiSelect ? (
<CustomMultiSelect
key={
selectValue && Array.isArray(selectValue)
? selectValue.join(' ')
: selectValue || variableData.id
}
options={optionsData.map((option) => ({
label: option.toString(),
value: option.toString(),
}))}
defaultValue={variableData.defaultValue || selectValue}
onChange={handleTempChange}
bordered={false}
placeholder="Select value"
placement="bottomLeft"
style={SelectItemStyle}
loading={isLoading}
showSearch
data-testid="variable-select"
className="variable-select"
popupClassName="dropdown-styles"
maxTagCount={2}
getPopupContainer={popupContainer}
value={tempSelection || selectValue}
onDropdownVisibleChange={handleDropdownVisibleChange}
errorMessage={errorMessage}
// eslint-disable-next-line react/no-unstable-nested-components
maxTagPlaceholder={(omittedValues): JSX.Element => {
const maxDisplayValues = 10;
const valuesToShow = omittedValues.slice(0, maxDisplayValues);
const hasMore = omittedValues.length > maxDisplayValues;
const tooltipText =
valuesToShow.map(({ value }) => value).join(', ') +
(hasMore ? ` + ${omittedValues.length - maxDisplayValues} more` : '');
return (
<Tooltip title={tooltipText}>
<span>+ {omittedValues.length} </span>
</Tooltip>
);
}}
onClear={(): void => {
handleChange([]);
}}
enableAllSelection={enableSelectAll}
maxTagTextLength={30}
allowClear={selectValue !== ALL_SELECT_VALUE && selectValue !== 'ALL'}
onRetry={(): void => {
setErrorMessage(null);
refetch();
}}
/>
) : (
<CustomSelect
key={
selectValue && Array.isArray(selectValue)
? selectValue.join(' ')
: selectValue || variableData.id
}
defaultValue={variableData.defaultValue || selectValue}
onChange={handleChange}
bordered={false}
placeholder="Select value"
style={SelectItemStyle}
loading={isLoading}
showSearch
data-testid="variable-select"
className="variable-select"
popupClassName="dropdown-styles"
getPopupContainer={popupContainer}
options={optionsData.map((option) => ({
label: option.toString(),
value: option.toString(),
}))}
value={selectValue}
errorMessage={errorMessage}
onRetry={(): void => {
setErrorMessage(null);
refetch();
}}
/>
))
)}
{variableData.type !== 'TEXTBOX' && errorMessage && (
<span style={{ margin: '0 0.5rem' }}>
<Popover
placement="top"
content={<Typography>{errorMessage}</Typography>}
>
<WarningOutlined style={{ color: orange[5] }} />
</Popover>
</span>
{variableType === 'CUSTOM' && (
<CustomVariableInput
variableData={variableData}
onValueUpdate={onValueUpdate}
/>
)}
{variableType === 'QUERY' && (
<QueryVariableInput
variableData={variableData}
onValueUpdate={onValueUpdate}
existingVariables={existingVariables}
variablesToGetUpdated={variablesToGetUpdated}
setVariablesToGetUpdated={setVariablesToGetUpdated}
dependencyData={dependencyData}
/>
)}
</div>
</div>

View File

@@ -0,0 +1,201 @@
import { useCallback, useEffect, useMemo, useState } from 'react';
import { isEmpty } from 'lodash-es';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { areArraysEqual, getSelectValue } from './util';
interface UseDashboardVariableSelectHelperParams {
variableData: IDashboardVariable;
optionsData: (string | number | boolean)[];
onValueUpdate: (
name: string,
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
) => void;
}
interface UseDashboardVariableSelectHelperReturn {
// State
tempSelection: string | string[] | undefined;
setTempSelection: React.Dispatch<
React.SetStateAction<string | string[] | undefined>
>;
value: string | string[] | undefined;
defaultValue: string | string[] | undefined;
// Derived values
enableSelectAll: boolean;
// Handlers
onChange: (value: string | string[]) => void;
onDropdownVisibleChange: (visible: boolean) => void;
handleClear: () => void;
}
// eslint-disable-next-line sonarjs/cognitive-complexity
export function useDashboardVariableSelectHelper({
variableData,
optionsData,
onValueUpdate,
}: UseDashboardVariableSelectHelperParams): UseDashboardVariableSelectHelperReturn {
const { selectedValue } = variableData;
const [tempSelection, setTempSelection] = useState<
string | string[] | undefined
>(undefined);
const selectedValueStringified = useMemo(
() => getSelectValue(selectedValue, variableData),
[selectedValue, variableData],
);
const enableSelectAll = variableData.multiSelect && variableData.showALLOption;
const selectValue =
variableData.allSelected && enableSelectAll
? 'ALL'
: selectedValueStringified;
const handleChange = useCallback(
(inputValue: string | string[]): void => {
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
if (
value === variableData.selectedValue ||
(Array.isArray(value) &&
Array.isArray(variableData.selectedValue) &&
areArraysEqual(value, variableData.selectedValue))
) {
return;
}
if (variableData.name) {
// Check if ALL is effectively selected by comparing with available options
const isAllSelected =
Array.isArray(value) &&
value.length > 0 &&
optionsData.every((option) => value.includes(option.toString()));
if (isAllSelected && variableData.showALLOption) {
// For ALL selection, pass optionsData as the value and set allSelected to true
onValueUpdate(variableData.name, variableData.id, optionsData, true);
} else {
onValueUpdate(variableData.name, variableData.id, value, false);
}
}
},
[
variableData.multiSelect,
variableData.selectedValue,
variableData.name,
variableData.id,
variableData.showALLOption,
onValueUpdate,
optionsData,
],
);
const handleTempChange = useCallback(
(inputValue: string | string[]): void => {
// Store the selection in temporary state while dropdown is open
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
setTempSelection(value);
},
[variableData.multiSelect],
);
// Apply default value on first render if no selection exists
const finalSelectedValues = useMemo(() => {
if (variableData.multiSelect) {
let value = tempSelection || selectedValue;
if (isEmpty(value)) {
if (variableData.showALLOption) {
if (variableData.defaultValue) {
value = variableData.defaultValue;
} else {
value = optionsData;
}
} else if (variableData.defaultValue) {
value = variableData.defaultValue;
} else {
value = optionsData?.[0];
}
}
return value;
}
if (isEmpty(selectedValue)) {
if (variableData.defaultValue) {
return variableData.defaultValue;
}
return optionsData[0]?.toString();
}
return selectedValue;
}, [
variableData.multiSelect,
variableData.showALLOption,
variableData.defaultValue,
selectedValue,
tempSelection,
optionsData,
]);
// Apply default values when needed
useEffect(() => {
if (
(variableData.multiSelect && !(tempSelection || selectValue)) ||
isEmpty(selectValue)
) {
handleChange(finalSelectedValues as string[] | string);
}
}, [
finalSelectedValues,
handleChange,
selectValue,
tempSelection,
variableData.multiSelect,
]);
// Handle dropdown visibility changes
const onDropdownVisibleChange = useCallback(
(visible: boolean): void => {
// Initialize temp selection when opening dropdown
if (visible) {
setTempSelection(getSelectValue(variableData.selectedValue, variableData));
}
// Apply changes when closing dropdown
else if (!visible && tempSelection !== undefined) {
// Call handleChange with the temporarily stored selection
handleChange(tempSelection);
setTempSelection(undefined);
}
},
[variableData, tempSelection, handleChange],
);
const handleClear = useCallback((): void => {
handleChange([]);
}, [handleChange]);
const value = variableData.multiSelect
? tempSelection || selectValue
: selectValue;
const defaultValue = variableData.defaultValue || selectValue;
const onChange = useMemo(() => {
return variableData.multiSelect ? handleTempChange : handleChange;
}, [variableData.multiSelect, handleTempChange, handleChange]);
return {
tempSelection,
setTempSelection,
enableSelectAll,
onDropdownVisibleChange,
handleClear,
value,
defaultValue,
onChange,
};
}
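A minimal sketch of how a multi-select variable component might consume this hook; the hook's import path and the exact CustomMultiSelect props are assumptions based on the previous inline implementation, not a confirmed contract.
import { memo } from 'react';
import { CustomMultiSelect } from 'components/NewSelect';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
// Hypothetical path; adjust to wherever the hook actually lives.
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
interface ExampleSelectVariableProps {
	variableData: IDashboardVariable;
	optionsData: (string | number | boolean)[];
	onValueUpdate: (
		name: string,
		id: string,
		value: IDashboardVariable['selectedValue'],
		allSelected: boolean,
	) => void;
}
function ExampleSelectVariable({
	variableData,
	optionsData,
	onValueUpdate,
}: ExampleSelectVariableProps): JSX.Element {
	const {
		value,
		defaultValue,
		onChange,
		onDropdownVisibleChange,
		handleClear,
		enableSelectAll,
	} = useDashboardVariableSelectHelper({
		variableData,
		optionsData,
		onValueUpdate,
	});
	return (
		<CustomMultiSelect
			value={value}
			defaultValue={defaultValue}
			onChange={onChange}
			onDropdownVisibleChange={onDropdownVisibleChange}
			onClear={handleClear}
			enableAllSelection={enableSelectAll}
			options={optionsData.map((option) => ({
				label: option.toString(),
				value: option.toString(),
			}))}
		/>
	);
}
export default memo(ExampleSelectVariable);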

View File

@@ -381,3 +381,16 @@ export const uniqueValues = (values: string[] | string): string[] | string => {
return values;
};
export const getSelectValue = (
selectedValue: IDashboardVariable['selectedValue'],
variableData: IDashboardVariable,
): string | string[] | undefined => {
if (Array.isArray(selectedValue)) {
if (!variableData.multiSelect && selectedValue.length === 1) {
return selectedValue[0]?.toString();
}
return selectedValue.map((item) => item.toString());
}
return selectedValue?.toString();
};
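For clarity, a few illustrative calls and their return values (inputs are hypothetical):
// Single-select with a one-element array collapses to a scalar string:
// getSelectValue([42], { multiSelect: false } as IDashboardVariable) -> '42'
// Multi-select keeps an array of stringified items:
// getSelectValue([42, 'api'], { multiSelect: true } as IDashboardVariable) -> ['42', 'api']
// Non-array values are stringified directly:
// getSelectValue(true, { multiSelect: false } as IDashboardVariable) -> 'true'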

View File

@@ -0,0 +1,21 @@
.chart-manager-container {
width: 100%;
max-height: calc(40% - 40px);
display: flex;
flex-direction: column;
gap: 16px;
.chart-manager-header {
display: flex;
justify-content: space-between;
align-items: center;
gap: 16px;
.chart-manager-actions-container {
display: flex;
justify-content: flex-end;
align-items: center;
gap: 8px;
}
}
}

View File

@@ -0,0 +1,147 @@
import { useCallback, useEffect, useMemo, useState } from 'react';
import { Button, Input } from 'antd';
import { ResizeTable } from 'components/ResizeTable';
import { getGraphManagerTableColumns } from 'container/GridCardLayout/GridCard/FullView/TableRender/GraphManagerColumns';
import { ExtendedChartDataset } from 'container/GridCardLayout/GridCard/FullView/types';
import { getDefaultTableDataSet } from 'container/GridCardLayout/GridCard/FullView/utils';
import { useNotifications } from 'hooks/useNotifications';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { usePlotContext } from 'lib/uPlotV2/context/PlotContext';
import useLegendsSync from 'lib/uPlotV2/hooks/useLegendsSync';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import './ChartManager.styles.scss';
interface ChartManagerProps {
config: UPlotConfigBuilder;
alignedData: uPlot.AlignedData;
yAxisUnit?: string;
onCancel?: () => void;
}
/**
* ChartManager provides a tabular view to manage the visibility of
* individual series on a uPlot chart.
*
* It syncs with the legend state coming from the plot context and
* allows users to:
* - filter series by label
* - toggle individual series on/off
* - persist the visibility configuration to local storage.
*
* @param config - `UPlotConfigBuilder` instance used to derive chart options.
* @param alignedData - uPlot aligned data used to build the initial table dataset.
* @param yAxisUnit - Optional unit label for Y-axis values shown in the table.
* @param onCancel - Optional callback invoked when the user cancels the dialog.
*/
export default function ChartManager({
config,
alignedData,
yAxisUnit,
onCancel,
}: ChartManagerProps): JSX.Element {
const { notifications } = useNotifications();
const { legendItemsMap } = useLegendsSync({
config,
subscribeToFocusChange: false,
});
const {
onToggleSeriesOnOff,
onToggleSeriesVisibility,
syncSeriesVisibilityToLocalStorage,
} = usePlotContext();
const { isDashboardLocked } = useDashboard();
const [tableDataSet, setTableDataSet] = useState<ExtendedChartDataset[]>(() =>
getDefaultTableDataSet(config.getConfig() as uPlot.Options, alignedData),
);
const graphVisibilityState = useMemo(
() =>
Object.entries(legendItemsMap).reduce<boolean[]>((acc, [key, item]) => {
acc[Number(key)] = item.show;
return acc;
}, []),
[legendItemsMap],
);
useEffect(() => {
setTableDataSet(
getDefaultTableDataSet(config.getConfig() as uPlot.Options, alignedData),
);
}, [alignedData, config]);
const filterHandler = useCallback(
(event: React.ChangeEvent<HTMLInputElement>): void => {
const value = event.target.value.toString().toLowerCase();
const updatedDataSet = tableDataSet.map((item) => {
if (item.label?.toLocaleLowerCase().includes(value)) {
return { ...item, show: true };
}
return { ...item, show: false };
});
setTableDataSet(updatedDataSet);
},
[tableDataSet],
);
const dataSource = useMemo(
() =>
tableDataSet.filter(
(item, index) => index !== 0 && item.show, // skipping the first item as it is the x-axis
),
[tableDataSet],
);
const columns = useMemo(
() =>
getGraphManagerTableColumns({
tableDataSet,
checkBoxOnChangeHandler: (_e, index) => {
onToggleSeriesOnOff(index);
},
graphVisibilityState,
labelClickedHandler: onToggleSeriesVisibility,
yAxisUnit,
isGraphDisabled: isDashboardLocked,
}),
// eslint-disable-next-line react-hooks/exhaustive-deps
[tableDataSet, graphVisibilityState, yAxisUnit, isDashboardLocked],
);
const handleSave = useCallback((): void => {
syncSeriesVisibilityToLocalStorage();
notifications.success({
message: 'The updated graphs & legends are saved',
});
if (onCancel) {
onCancel();
}
}, [syncSeriesVisibilityToLocalStorage, notifications, onCancel]);
return (
<div className="chart-manager-container">
<div className="chart-manager-header">
<Input onChange={filterHandler} placeholder="Filter Series" />
<div className="chart-manager-actions-container">
<Button type="default" onClick={onCancel}>
Cancel
</Button>
<Button type="primary" onClick={handleSave}>
Save
</Button>
</div>
</div>
<div className="chart-manager-table-container">
<ResizeTable
columns={columns}
dataSource={dataSource}
virtual
rowKey="index"
scroll={{ y: 200 }}
pagination={false}
/>
</div>
</div>
);
}

View File

@@ -0,0 +1,120 @@
import { useCallback } from 'react';
import { UseQueryResult } from 'react-query';
import {
getTimeRangeFromStepInterval,
isApmMetric,
} from 'container/PanelWrapper/utils';
import { getUplotClickData } from 'container/QueryTable/Drilldown/drilldownUtils';
import useGraphContextMenu from 'container/QueryTable/Drilldown/useGraphContextMenu';
import {
PopoverPosition,
useCoordinates,
} from 'periscope/components/ContextMenu';
import { SuccessResponse } from 'types/api';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { DataSource } from 'types/common/queryBuilder';
interface UseTimeSeriesContextMenuParams {
widget: Widgets;
queryResponse: UseQueryResult<
SuccessResponse<MetricRangePayloadProps, unknown>,
Error
>;
}
export const usePanelContextMenu = ({
widget,
queryResponse,
}: UseTimeSeriesContextMenuParams): {
coordinates: { x: number; y: number } | null;
popoverPosition: PopoverPosition | null;
onClose: () => void;
menuItemsConfig: {
header?: string | React.ReactNode;
items?: React.ReactNode;
};
clickHandlerWithContextMenu: (...args: any[]) => void;
} => {
const {
coordinates,
popoverPosition,
clickedData,
onClose,
subMenu,
onClick,
setSubMenu,
} = useCoordinates();
const { menuItemsConfig } = useGraphContextMenu({
widgetId: widget.id || '',
query: widget.query,
graphData: clickedData,
onClose,
coordinates,
subMenu,
setSubMenu,
contextLinks: widget.contextLinks,
panelType: widget.panelTypes,
queryRange: queryResponse,
});
const clickHandlerWithContextMenu = useCallback(
(...args: any[]) => {
const [
xValue,
_yvalue,
_mouseX,
_mouseY,
metric,
queryData,
absoluteMouseX,
absoluteMouseY,
axesData,
focusedSeries,
] = args;
const data = getUplotClickData({
metric,
queryData,
absoluteMouseX,
absoluteMouseY,
focusedSeries,
});
let timeRange;
if (axesData && queryData?.queryName) {
const compositeQuery = (queryResponse?.data?.params as any)?.compositeQuery;
if (compositeQuery?.queries) {
const specificQuery = compositeQuery.queries.find(
(query: any) => query.spec?.name === queryData.queryName,
);
const stepInterval = specificQuery?.spec?.stepInterval || 60;
timeRange = getTimeRangeFromStepInterval(
stepInterval,
metric?.clickedTimestamp || xValue,
specificQuery?.spec?.signal === DataSource.METRICS &&
isApmMetric(specificQuery?.spec?.aggregations[0]?.metricName),
);
}
}
if (data && data?.record?.queryName) {
onClick(data.coord, { ...data.record, label: data.label, timeRange });
}
},
[onClick, queryResponse],
);
return {
coordinates,
popoverPosition,
onClose,
menuItemsConfig,
clickHandlerWithContextMenu,
};
};

View File

@@ -0,0 +1,4 @@
.panel-container {
height: 100%;
width: 100%;
}

View File

@@ -0,0 +1,176 @@
import { useEffect, useMemo, useRef, useState } from 'react';
import TimeSeries from 'container/DashboardContainer/visualization/charts/TimeSeries/TimeSeries';
import ChartManager from 'container/DashboardContainer/visualization/components/ChartManager/ChartManager';
import { usePanelContextMenu } from 'container/DashboardContainer/visualization/hooks/usePanelContextMenu';
import { PanelWrapperProps } from 'container/PanelWrapper/panelWrapper.types';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { LegendPosition } from 'lib/uPlotV2/components/types';
import { LineInterpolation } from 'lib/uPlotV2/config/types';
import { ContextMenu } from 'periscope/components/ContextMenu';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { useTimezone } from 'providers/Timezone';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import uPlot from 'uplot';
import { getTimeRange } from 'utils/getTimeRange';
import { prepareChartData, prepareUPlotConfig } from '../TimeSeriesPanel/utils';
import '../Panel.styles.scss';
function TimeSeriesPanel(props: PanelWrapperProps): JSX.Element {
const {
panelMode,
queryResponse,
widget,
onDragSelect,
isFullViewMode,
onToggleModelHandler,
} = props;
const { toScrollWidgetId, setToScrollWidgetId } = useDashboard();
const graphRef = useRef<HTMLDivElement>(null);
const [minTimeScale, setMinTimeScale] = useState<number>();
const [maxTimeScale, setMaxTimeScale] = useState<number>();
const containerDimensions = useResizeObserver(graphRef);
const isDarkMode = useIsDarkMode();
const { timezone } = useTimezone();
useEffect(() => {
if (toScrollWidgetId === widget.id) {
graphRef.current?.scrollIntoView({
behavior: 'smooth',
block: 'center',
});
graphRef.current?.focus();
setToScrollWidgetId('');
}
}, [toScrollWidgetId, setToScrollWidgetId, widget.id]);
useEffect((): void => {
const { startTime, endTime } = getTimeRange(queryResponse);
setMinTimeScale(startTime);
setMaxTimeScale(endTime);
}, [queryResponse]);
const {
coordinates,
popoverPosition,
onClose,
menuItemsConfig,
clickHandlerWithContextMenu,
} = usePanelContextMenu({
widget,
queryResponse,
});
const chartData = useMemo(() => {
if (!queryResponse?.data?.payload) {
return [];
}
return prepareChartData(queryResponse?.data?.payload);
}, [queryResponse?.data?.payload]);
const config = useMemo(() => {
const tzDate = (timestamp: number): Date =>
uPlot.tzDate(new Date(timestamp * 1e3), timezone.value);
return prepareUPlotConfig({
widgetId: widget.id || '',
apiResponse: queryResponse?.data?.payload as MetricRangePayloadProps,
tzDate,
minTimeScale: minTimeScale,
maxTimeScale: maxTimeScale,
isLogScale: widget?.isLogScale ?? false,
thresholds: {
scaleKey: 'y',
thresholds: (widget.thresholds || []).map((threshold) => ({
thresholdValue: threshold.thresholdValue ?? 0,
thresholdColor: threshold.thresholdColor,
thresholdUnit: threshold.thresholdUnit,
thresholdLabel: threshold.thresholdLabel,
})),
yAxisUnit: widget.yAxisUnit,
},
yAxisUnit: widget.yAxisUnit || '',
softMin: widget.softMin === undefined ? null : widget.softMin,
softMax: widget.softMax === undefined ? null : widget.softMax,
spanGaps: false,
colorMapping: widget.customLegendColors ?? {},
lineInterpolation: LineInterpolation.Spline,
isDarkMode,
onClick: clickHandlerWithContextMenu,
onDragSelect,
currentQuery: widget.query,
panelMode,
});
}, [
widget.id,
maxTimeScale,
minTimeScale,
timezone.value,
widget.customLegendColors,
widget.isLogScale,
widget.softMax,
widget.softMin,
isDarkMode,
queryResponse?.data?.payload,
widget.query,
widget.thresholds,
widget.yAxisUnit,
panelMode,
clickHandlerWithContextMenu,
onDragSelect,
]);
const layoutChildren = useMemo(() => {
if (!isFullViewMode) {
return null;
}
return (
<ChartManager
config={config}
alignedData={chartData}
yAxisUnit={widget.yAxisUnit}
onCancel={onToggleModelHandler}
/>
);
}, [
isFullViewMode,
config,
chartData,
widget.yAxisUnit,
onToggleModelHandler,
]);
return (
<div className="panel-container" ref={graphRef}>
{containerDimensions.width > 0 && containerDimensions.height > 0 && (
<TimeSeries
config={config}
legendConfig={{
position: widget?.legendPosition ?? LegendPosition.BOTTOM,
}}
yAxisUnit={widget.yAxisUnit}
decimalPrecision={widget.decimalPrecision}
timezone={timezone.value}
data={chartData as uPlot.AlignedData}
width={containerDimensions.width}
height={containerDimensions.height}
layoutChildren={layoutChildren}
>
<ContextMenu
coordinates={coordinates}
popoverPosition={popoverPosition}
title={menuItemsConfig.header as string}
items={menuItemsConfig.items}
onClose={onClose}
/>
</TimeSeries>
)}
</div>
);
}
export default TimeSeriesPanel;

View File

@@ -0,0 +1,170 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import {
fillMissingXAxisTimestamps,
getXAxisTimestamps,
} from 'container/DashboardContainer/visualization/panels/utils';
import { getLegend } from 'lib/dashboard/getQueryResults';
import getLabelName from 'lib/getLabelName';
import onClickPlugin, {
OnClickPluginOpts,
} from 'lib/uPlotLib/plugins/onClickPlugin';
import {
DistributionType,
DrawStyle,
LineInterpolation,
LineStyle,
SelectionPreferencesSource,
VisibilityMode,
} from 'lib/uPlotV2/config/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { ThresholdsDrawHookOptions } from 'lib/uPlotV2/hooks/types';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { PanelMode } from '../types';
export const prepareChartData = (
apiResponse: MetricRangePayloadProps,
): uPlot.AlignedData => {
const seriesList = apiResponse?.data?.result || [];
const timestampArr = getXAxisTimestamps(seriesList);
const yAxisValuesArr = fillMissingXAxisTimestamps(timestampArr, seriesList);
return [timestampArr, ...yAxisValuesArr];
};
export const prepareUPlotConfig = ({
widgetId,
apiResponse,
tzDate,
minTimeScale,
maxTimeScale,
isLogScale,
thresholds,
softMin,
softMax,
spanGaps,
colorMapping,
lineInterpolation,
isDarkMode,
currentQuery,
onDragSelect,
onClick,
yAxisUnit,
panelMode,
}: {
widgetId: string;
apiResponse: MetricRangePayloadProps;
tzDate: uPlot.LocalDateFromUnix;
minTimeScale: number | undefined;
maxTimeScale: number | undefined;
isLogScale: boolean;
softMin: number | null;
softMax: number | null;
spanGaps: boolean;
colorMapping: Record<string, string>;
lineInterpolation: LineInterpolation;
isDarkMode: boolean;
thresholds: ThresholdsDrawHookOptions;
currentQuery: Query;
yAxisUnit: string;
onDragSelect: (startTime: number, endTime: number) => void;
onClick?: OnClickPluginOpts['onClick'];
panelMode: PanelMode;
}): UPlotConfigBuilder => {
const builder = new UPlotConfigBuilder({
onDragSelect,
widgetId,
tzDate,
shouldSaveSelectionPreference: panelMode === PanelMode.DASHBOARD_VIEW,
selectionPreferencesSource: [
PanelMode.DASHBOARD_VIEW,
PanelMode.STANDALONE_VIEW,
].includes(panelMode)
? SelectionPreferencesSource.LOCAL_STORAGE
: SelectionPreferencesSource.IN_MEMORY,
});
// X scale time axis
builder.addScale({
scaleKey: 'x',
time: true,
min: minTimeScale,
max: maxTimeScale,
logBase: isLogScale ? 10 : undefined,
distribution: isLogScale
? DistributionType.Logarithmic
: DistributionType.Linear,
});
// Y scale value axis, driven primarily by softMin/softMax and data
builder.addScale({
scaleKey: 'y',
time: false,
min: undefined,
max: undefined,
softMin: softMin ?? undefined,
softMax: softMax ?? undefined,
thresholds,
logBase: isLogScale ? 10 : undefined,
distribution: isLogScale
? DistributionType.Logarithmic
: DistributionType.Linear,
});
builder.addThresholds(thresholds);
if (typeof onClick === 'function') {
builder.addPlugin(
onClickPlugin({
onClick,
apiResponse,
}),
);
}
builder.addAxis({
scaleKey: 'x',
show: true,
side: 2,
isDarkMode,
isLogScale: false,
panelType: PANEL_TYPES.TIME_SERIES,
});
builder.addAxis({
scaleKey: 'y',
show: true,
side: 3,
isDarkMode,
isLogScale: false,
yAxisUnit,
panelType: PANEL_TYPES.TIME_SERIES,
});
apiResponse.data?.result?.forEach((series) => {
const baseLabelName = getLabelName(
series.metric,
series.queryName || '', // query
series.legend || '',
);
const label = currentQuery
? getLegend(series, currentQuery, baseLabelName)
: baseLabelName;
builder.addSeries({
scaleKey: 'y',
drawStyle: DrawStyle.Line,
label: label,
colorMapping,
spanGaps,
lineStyle: LineStyle.Solid,
lineInterpolation,
showPoints: VisibilityMode.Never,
pointSize: 5,
isDarkMode,
});
});
return builder;
};

View File

@@ -0,0 +1,54 @@
import { normalizePlotValue } from 'lib/uPlotV2/utils/dataUtils';
import { QueryData } from 'types/api/widgets/getQuery';
export function getXAxisTimestamps(seriesList: QueryData[]): number[] {
const timestamps = new Set<number>();
seriesList.forEach((series: { values?: [number, string][] }) => {
if (series?.values) {
series.values.forEach((value) => {
timestamps.add(value[0]);
});
}
});
const timestampsArr = Array.from(timestamps);
timestampsArr.sort((a, b) => a - b);
return timestampsArr;
}
export function fillMissingXAxisTimestamps(
timestampArr: number[],
data: Array<{ values?: [number, string][] }>,
): (number | null)[][] {
// Ensure we work with a sorted, deduplicated list of x-axis timestamps
const canonicalTimestamps = Array.from(new Set(timestampArr)).sort(
(a, b) => a - b,
);
return data.map(({ values }) =>
buildSeriesYValues(canonicalTimestamps, values),
);
}
function buildSeriesYValues(
timestamps: number[],
values?: [number, string][],
): (number | null)[] {
if (!values?.length) {
return [];
}
const valueByTimestamp = new Map<number, number | null>();
for (let i = 0; i < values.length; i++) {
const [timestamp, rawValue] = values[i];
valueByTimestamp.set(timestamp, normalizePlotValue(rawValue));
}
return timestamps.map((timestamp) => {
const value = valueByTimestamp.get(timestamp);
return value !== undefined ? value : null;
});
}
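A small illustrative input/output trace for the two helpers above, assuming normalizePlotValue parses numeric strings into numbers (values are hypothetical):
// const series = [
//   { values: [[100, '1'], [300, '3']] as [number, string][] },
//   { values: [[200, '2']] as [number, string][] },
// ];
// getXAxisTimestamps(series as QueryData[])           -> [100, 200, 300]
// fillMissingXAxisTimestamps([100, 200, 300], series) -> [[1, null, 3], [null, 2, null]]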

View File

@@ -14,7 +14,7 @@ describe('Get Series Data', () => {
expect(seriesData.length).toBe(5);
expect(seriesData[1].label).toBe('firstLegend');
expect(seriesData[1].show).toBe(true);
expect(seriesData[1].fill).toBe('#C71585');
expect(seriesData[1].fill).toBe('#FF6F91');
expect(seriesData[1].width).toBe(2);
});

View File

@@ -24,6 +24,14 @@
opacity: 1;
}
.legend-empty-state {
font-size: 12px;
color: var(--bg-vanilla-400);
text-align: center;
padding: 12px;
padding: 2rem 0;
}
.legend-virtuoso-container {
height: 100%;
width: 100%;

View File

@@ -81,6 +81,13 @@ export default function Legend({
[focusedSeriesIndex, position],
);
const isEmptyState = useMemo(() => {
if (position !== LegendPosition.RIGHT || !legendSearchQuery.trim()) {
return false;
}
return visibleLegendItems.length === 0;
}, [position, legendSearchQuery, visibleLegendItems]);
return (
<div
ref={legendContainerRef}
@@ -103,15 +110,21 @@ export default function Legend({
/>
</div>
)}
<VirtuosoGrid
className={cx(
'legend-virtuoso-container',
`legend-virtuoso-container-${position.toLowerCase()}`,
{ 'legend-virtuoso-container-single-row': isSingleRow },
)}
data={visibleLegendItems}
itemContent={(_, item): JSX.Element => renderLegendItem(item)}
/>
{isEmptyState ? (
<div className="legend-empty-state">
No series found matching &quot;{legendSearchQuery}&quot;
</div>
) : (
<VirtuosoGrid
className={cx(
'legend-virtuoso-container',
`legend-virtuoso-container-${position.toLowerCase()}`,
{ 'legend-virtuoso-container-single-row': isSingleRow },
)}
data={visibleLegendItems}
itemContent={(_, item): JSX.Element => renderLegendItem(item)}
/>
)}
</div>
);
}

View File

@@ -4,6 +4,7 @@ import {
} from 'container/DashboardContainer/DashboardVariablesSelection/util';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { initializeVariableFetchStore } from '../variableFetchStore';
import {
IDashboardVariables,
IDashboardVariablesStoreState,
@@ -62,9 +63,30 @@ export function buildDependencyData(
};
}
/**
* Initialize the variable fetch store with the computed dependency data
*/
function initializeFetchStore(
sortedVariablesArray: IDashboardVariable[],
dependencyData: IDependencyData | null,
): void {
if (dependencyData) {
const allVariableNames = sortedVariablesArray
.map((v) => v.name)
.filter((name): name is string => !!name);
initializeVariableFetchStore(
allVariableNames,
dependencyData.graph,
dependencyData.parentDependencyGraph,
);
}
}
/**
* Compute derived values from variables
* This is a composition of buildSortedVariablesArray and buildDependencyData
* Also initializes the variable fetch store with the new dependency data
*/
export function computeDerivedValues(
variables: IDashboardVariablesStoreState['variables'],
@@ -75,15 +97,22 @@ export function computeDerivedValues(
const sortedVariablesArray = buildSortedVariablesArray(variables);
const dependencyData = buildDependencyData(sortedVariablesArray);
// Initialize the variable fetch store when dependency data is computed
initializeFetchStore(sortedVariablesArray, dependencyData);
return { sortedVariablesArray, dependencyData };
}
/**
* Update derived values in the store state (for use with immer)
* Also initializes the variable fetch store with the new dependency data
*/
export function updateDerivedValues(
draft: IDashboardVariablesStoreState,
): void {
draft.sortedVariablesArray = buildSortedVariablesArray(draft.variables);
draft.dependencyData = buildDependencyData(draft.sortedVariablesArray);
// Initialize the variable fetch store when dependency data is updated
initializeFetchStore(draft.sortedVariablesArray, draft.dependencyData);
}

View File

@@ -0,0 +1,57 @@
import { VariableGraph } from 'container/DashboardContainer/DashboardVariablesSelection/util';
import createStore from './store';
// Fetch state for each variable
export type VariableFetchState =
| 'idle' // stable state - initial or complete
| 'loading' // actively fetching data (first time)
| 'revalidating' // refetching existing data
| 'waiting' // blocked on parent dependencies
| 'error';
export interface IVariableFetchStoreState {
// Per-variable fetch state
states: Record<string, VariableFetchState>;
// Dependency graphs (set once when variables change)
dependencyGraph: VariableGraph; // variable -> children that depend on it
parentGraph: VariableGraph; // variable -> parents it depends on
// Track last update timestamp per variable to trigger re-fetches
lastUpdated: Record<string, number>;
}
const initialState: IVariableFetchStoreState = {
states: {},
dependencyGraph: {},
parentGraph: {},
lastUpdated: {},
};
export const variableFetchStore = createStore<IVariableFetchStoreState>(
initialState,
);
// ============== Actions ==============
/**
* Initialize the store with dependency graphs and set initial states
*/
export function initializeVariableFetchStore(
variableNames: string[],
dependencyGraph: VariableGraph,
parentGraph: VariableGraph,
): void {
variableFetchStore.update((draft) => {
draft.dependencyGraph = dependencyGraph;
draft.parentGraph = parentGraph;
// Initialize all variables to idle, preserving any state that already exists
variableNames.forEach((name) => {
if (!draft.states[name]) {
draft.states[name] = 'idle';
}
});
});
}
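A hedged sketch of an additional action built on the same store.update pattern; it is not part of this module, and it assumes VariableGraph maps a variable name to an array of dependent variable names.
// Illustrative only: mark a variable as loading and flag its direct children as
// waiting on it, mirroring how initializeVariableFetchStore mutates the draft.
export function markVariableLoading(name: string): void {
	variableFetchStore.update((draft) => {
		draft.states[name] = 'loading';
		draft.lastUpdated[name] = Date.now();
		(draft.dependencyGraph[name] ?? []).forEach((child) => {
			draft.states[child] = 'waiting';
		});
	});
}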

View File

@@ -1,127 +0,0 @@
package fields
import (
"bytes"
"io"
"net/http"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrymetadata"
"github.com/SigNoz/signoz/pkg/telemetrymeter"
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrytraces"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type API struct {
telemetryStore telemetrystore.TelemetryStore
telemetryMetadataStore telemetrytypes.MetadataStore
}
// TODO: move this to module and remove metastore init
func NewAPI(
settings factory.ProviderSettings,
telemetryStore telemetrystore.TelemetryStore,
) *API {
telemetryMetadataStore := telemetrymetadata.NewTelemetryMetaStore(
settings,
telemetryStore,
telemetrytraces.DBName,
telemetrytraces.TagAttributesV2TableName,
telemetrytraces.SpanAttributesKeysTblName,
telemetrytraces.SpanIndexV3TableName,
telemetrymetrics.DBName,
telemetrymetrics.AttributesMetadataTableName,
telemetrymeter.DBName,
telemetrymeter.SamplesAgg1dTableName,
telemetrylogs.DBName,
telemetrylogs.LogsV2TableName,
telemetrylogs.TagAttributesV2TableName,
telemetrylogs.LogAttributeKeysTblName,
telemetrylogs.LogResourceKeysTblName,
telemetrymetadata.DBName,
telemetrymetadata.AttributesMetadataLocalTableName,
)
return &API{
telemetryStore: telemetryStore,
telemetryMetadataStore: telemetryMetadataStore,
}
}
func (api *API) GetFieldsKeys(w http.ResponseWriter, r *http.Request) {
type fieldKeysResponse struct {
Keys map[string][]*telemetrytypes.TelemetryFieldKey `json:"keys"`
Complete bool `json:"complete"`
}
bodyBytes, _ := io.ReadAll(r.Body)
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
ctx := r.Context()
fieldKeySelector, err := parseFieldKeyRequest(r)
if err != nil {
render.Error(w, err)
return
}
keys, complete, err := api.telemetryMetadataStore.GetKeys(ctx, fieldKeySelector)
if err != nil {
render.Error(w, err)
return
}
response := fieldKeysResponse{
Keys: keys,
Complete: complete,
}
render.Success(w, http.StatusOK, response)
}
func (api *API) GetFieldsValues(w http.ResponseWriter, r *http.Request) {
type fieldValuesResponse struct {
Values *telemetrytypes.TelemetryFieldValues `json:"values"`
Complete bool `json:"complete"`
}
bodyBytes, _ := io.ReadAll(r.Body)
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
ctx := r.Context()
fieldValueSelector, err := parseFieldValueRequest(r)
if err != nil {
render.Error(w, err)
return
}
allValues, allComplete, err := api.telemetryMetadataStore.GetAllValues(ctx, fieldValueSelector)
if err != nil {
render.Error(w, err)
return
}
relatedValues, relatedComplete, err := api.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector)
if err != nil {
// we don't want to return error if we fail to get related values for some reason
relatedValues = []string{}
}
values := &telemetrytypes.TelemetryFieldValues{
StringValues: allValues.StringValues,
NumberValues: allValues.NumberValues,
RelatedValues: relatedValues,
}
response := fieldValuesResponse{
Values: values,
Complete: allComplete && relatedComplete,
}
render.Success(w, http.StatusOK, response)
}

View File

@@ -1,162 +0,0 @@
package fields
import (
"net/http"
"strconv"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
func parseFieldKeyRequest(r *http.Request) (*telemetrytypes.FieldKeySelector, error) {
var req telemetrytypes.FieldKeySelector
var signal telemetrytypes.Signal
var source telemetrytypes.Source
var err error
signalStr := r.URL.Query().Get("signal")
if signalStr != "" {
signal = telemetrytypes.Signal{String: valuer.NewString(signalStr)}
} else {
signal = telemetrytypes.SignalUnspecified
}
sourceStr := r.URL.Query().Get("source")
if sourceStr != "" {
source = telemetrytypes.Source{String: valuer.NewString(sourceStr)}
} else {
source = telemetrytypes.SourceUnspecified
}
if r.URL.Query().Get("limit") != "" {
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse limit")
}
req.Limit = limit
} else {
req.Limit = 1000
}
var startUnixMilli, endUnixMilli int64
if r.URL.Query().Get("startUnixMilli") != "" {
startUnixMilli, err = strconv.ParseInt(r.URL.Query().Get("startUnixMilli"), 10, 64)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse startUnixMilli")
}
// Round down to the nearest 6 hours (21600000 milliseconds)
startUnixMilli -= startUnixMilli % 21600000
}
if r.URL.Query().Get("endUnixMilli") != "" {
endUnixMilli, err = strconv.ParseInt(r.URL.Query().Get("endUnixMilli"), 10, 64)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse endUnixMilli")
}
}
// Parse fieldContext directly instead of using JSON unmarshalling.
var fieldContext telemetrytypes.FieldContext
fieldContextStr := r.URL.Query().Get("fieldContext")
if fieldContextStr != "" {
fieldContext = telemetrytypes.FieldContext{String: valuer.NewString(fieldContextStr)}
}
// Parse fieldDataType directly instead of using JSON unmarshalling.
var fieldDataType telemetrytypes.FieldDataType
fieldDataTypeStr := r.URL.Query().Get("fieldDataType")
if fieldDataTypeStr != "" {
fieldDataType = telemetrytypes.FieldDataType{String: valuer.NewString(fieldDataTypeStr)}
}
metricName := r.URL.Query().Get("metricName")
var metricContext *telemetrytypes.MetricContext
if metricName != "" {
metricContext = &telemetrytypes.MetricContext{
MetricName: metricName,
}
}
name := r.URL.Query().Get("searchText")
if name != "" && fieldContext == telemetrytypes.FieldContextUnspecified {
parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, signal) {
name = parsedFieldKey.Name
fieldContext = parsedFieldKey.FieldContext
}
}
}
req = telemetrytypes.FieldKeySelector{
StartUnixMilli: startUnixMilli,
EndUnixMilli: endUnixMilli,
Signal: signal,
Source: source,
Name: name,
FieldContext: fieldContext,
FieldDataType: fieldDataType,
Limit: req.Limit,
SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeFuzzy,
MetricContext: metricContext,
}
return &req, nil
}
func parseFieldValueRequest(r *http.Request) (*telemetrytypes.FieldValueSelector, error) {
keySelector, err := parseFieldKeyRequest(r)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse field key request")
}
name := r.URL.Query().Get("name")
if name != "" && keySelector.FieldContext == telemetrytypes.FieldContextUnspecified {
parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, keySelector.Signal) {
name = parsedFieldKey.Name
keySelector.FieldContext = parsedFieldKey.FieldContext
}
}
}
keySelector.Name = name
existingQuery := r.URL.Query().Get("existingQuery")
value := r.URL.Query().Get("searchText")
// Parse limit for fieldValue request, fallback to default 50 if parsing fails.
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
if err != nil {
limit = 50
}
req := telemetrytypes.FieldValueSelector{
FieldKeySelector: keySelector,
ExistingQuery: existingQuery,
Value: value,
Limit: limit,
}
return &req, nil
}
func isContextValidForSignal(ctx telemetrytypes.FieldContext, signal telemetrytypes.Signal) bool {
if ctx == telemetrytypes.FieldContextResource ||
ctx == telemetrytypes.FieldContextAttribute ||
ctx == telemetrytypes.FieldContextScope {
return true
}
switch signal.StringValue() {
case telemetrytypes.SignalLogs.StringValue():
return ctx == telemetrytypes.FieldContextLog || ctx == telemetrytypes.FieldContextBody
case telemetrytypes.SignalTraces.StringValue():
return ctx == telemetrytypes.FieldContextSpan || ctx == telemetrytypes.FieldContextEvent || ctx == telemetrytypes.FieldContextTrace
case telemetrytypes.SignalMetrics.StringValue():
return ctx == telemetrytypes.FieldContextMetric
}
return true
}

View File

@@ -0,0 +1,50 @@
package signozapiserver
import (
"net/http"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/gorilla/mux"
)
func (provider *provider) addFieldsRoutes(router *mux.Router) error {
if err := router.Handle("/api/v1/fields/keys", handler.New(provider.authZ.ViewAccess(provider.fieldsHandler.GetFieldsKeys), handler.OpenAPIDef{
ID: "GetFieldsKeys",
Tags: []string{"fields"},
Summary: "Get field keys",
Description: "This endpoint returns field keys",
Request: nil,
RequestQuery: new(telemetrytypes.PostableFieldKeysParams),
RequestContentType: "",
Response: new(telemetrytypes.GettableFieldKeys),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v1/fields/values", handler.New(provider.authZ.ViewAccess(provider.fieldsHandler.GetFieldsValues), handler.OpenAPIDef{
ID: "GetFieldsValues",
Tags: []string{"fields"},
Summary: "Get field values",
Description: "This endpoint returns field values",
Request: nil,
RequestQuery: new(telemetrytypes.PostableFieldValueParams),
RequestContentType: "",
Response: new(telemetrytypes.GettableFieldValues),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
return nil
}
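For orientation, a hedged TypeScript client sketch against the route registered above; the query parameters and response fields follow the previous /v1/fields handlers shown earlier in this diff and may differ from the generated PostableFieldKeysParams / GettableFieldKeys types.
// Hypothetical client call (illustrative only); authentication is omitted even
// though the real route is guarded by ViewAccess.
async function fetchFieldKeys(baseUrl: string): Promise<void> {
	const params = new URLSearchParams({
		signal: 'logs',
		searchText: 'http.method',
		limit: '50',
	});
	const response = await fetch(`${baseUrl}/api/v1/fields/keys?${params.toString()}`);
	// render.Success may wrap the payload in an envelope; unwrap defensively.
	const body = (await response.json()) as {
		data?: { keys?: Record<string, unknown[]>; complete?: boolean };
		keys?: Record<string, unknown[]>;
		complete?: boolean;
	};
	const payload = body.data ?? body;
	console.log(payload.keys, payload.complete);
}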

View File

@@ -13,6 +13,7 @@ import (
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/modules/authdomain"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/preference"
@@ -44,6 +45,7 @@ type provider struct {
gatewayHandler gateway.Handler
roleGetter role.Getter
roleHandler role.Handler
fieldsHandler fields.Handler
}
func NewFactory(
@@ -63,9 +65,31 @@ func NewFactory(
gatewayHandler gateway.Handler,
roleGetter role.Getter,
roleHandler role.Handler,
fieldsHandler fields.Handler,
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler, globalHandler, promoteHandler, flaggerHandler, dashboardModule, dashboardHandler, metricsExplorerHandler, gatewayHandler, roleGetter, roleHandler)
return newProvider(
ctx,
providerSettings,
config,
orgGetter,
authz,
orgHandler,
userHandler,
sessionHandler,
authDomainHandler,
preferenceHandler,
globalHandler,
promoteHandler,
flaggerHandler,
dashboardModule,
dashboardHandler,
metricsExplorerHandler,
gatewayHandler,
roleGetter,
roleHandler,
fieldsHandler,
)
})
}
@@ -89,6 +113,7 @@ func newProvider(
gatewayHandler gateway.Handler,
roleGetter role.Getter,
roleHandler role.Handler,
fieldsHandler fields.Handler,
) (apiserver.APIServer, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
router := mux.NewRouter().UseEncodedPath()
@@ -111,6 +136,7 @@ func newProvider(
gatewayHandler: gatewayHandler,
roleGetter: roleGetter,
roleHandler: roleHandler,
fieldsHandler: fieldsHandler,
}
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz, roleGetter)
@@ -175,6 +201,10 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
return err
}
if err := provider.addFieldsRoutes(router); err != nil {
return err
}
return nil
}

View File

@@ -325,7 +325,7 @@ func (provider *provider) addUserRoutes(router *mux.Router) error {
Response: nil,
ResponseContentType: "",
SuccessStatusCode: http.StatusNoContent,
ErrorStatusCodes: []int{http.StatusBadRequest},
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusUnprocessableEntity},
Deprecated: false,
SecuritySchemes: []handler.OpenAPISecurityScheme{},
})).Methods(http.MethodPost).GetError(); err != nil {

View File

@@ -0,0 +1,11 @@
package fields
import "net/http"
type Handler interface {
// Gets the field keys for the given field key selector
GetFieldsKeys(http.ResponseWriter, *http.Request)
// Gets the field values for the given field value selector
GetFieldsValues(http.ResponseWriter, *http.Request)
}

View File

@@ -0,0 +1,79 @@
package implfields
import (
"net/http"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/binding"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type handler struct {
telemetryMetadataStore telemetrytypes.MetadataStore
}
func NewHandler(settings factory.ProviderSettings, telemetryMetadataStore telemetrytypes.MetadataStore) fields.Handler {
return &handler{
telemetryMetadataStore: telemetryMetadataStore,
}
}
func (handler *handler) GetFieldsKeys(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
var params telemetrytypes.PostableFieldKeysParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
render.Error(rw, err)
return
}
fieldKeySelector := telemetrytypes.NewFieldKeySelectorFromPostableFieldKeysParams(params)
keys, complete, err := handler.telemetryMetadataStore.GetKeys(ctx, fieldKeySelector)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, &telemetrytypes.GettableFieldKeys{
Keys: keys,
Complete: complete,
})
}
func (handler *handler) GetFieldsValues(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
var params telemetrytypes.PostableFieldValueParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
render.Error(rw, err)
return
}
fieldValueSelector := telemetrytypes.NewFieldValueSelectorFromPostableFieldValueParams(params)
allValues, allComplete, err := handler.telemetryMetadataStore.GetAllValues(ctx, fieldValueSelector)
if err != nil {
render.Error(rw, err)
return
}
relatedValues, relatedComplete, err := handler.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector)
if err != nil {
// we don't want to return an error if fetching related values fails for some reason
relatedValues = []string{}
}
values := &telemetrytypes.TelemetryFieldValues{
StringValues: allValues.StringValues,
NumberValues: allValues.NumberValues,
RelatedValues: relatedValues,
}
render.Success(rw, http.StatusOK, &telemetrytypes.GettableFieldValues{
Values: values,
Complete: allComplete && relatedComplete,
})
}

View File

@@ -369,7 +369,7 @@ func (module *Module) GetOrCreateResetPasswordToken(ctx context.Context, userID
func (module *Module) ForgotPassword(ctx context.Context, orgID valuer.UUID, email valuer.Email, frontendBaseURL string) error {
if !module.config.Password.Reset.AllowSelf {
return errors.New(errors.TypeUnsupported, errors.CodeUnsupported, "users are not allowed to reset their password themselves, please contact an admin to reset your password")
return errors.New(errors.TypeUnsupported, errors.CodeUnsupported, "Users are not allowed to reset their password themselves, please contact an admin to reset your password.")
}
user, err := module.store.GetUserByEmailAndOrgID(ctx, email, orgID)

View File

@@ -25,7 +25,6 @@ import (
"time"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/http/render"
@@ -145,8 +144,6 @@ type APIHandler struct {
LicensingAPI licensing.API
FieldsAPI *fields.API
QuerierAPI *querierAPI.API
QueryParserAPI *queryparser.API
@@ -177,8 +174,6 @@ type APIHandlerOpts struct {
LicensingAPI licensing.API
FieldsAPI *fields.API
QuerierAPI *querierAPI.API
QueryParserAPI *queryparser.API
@@ -243,7 +238,6 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
AlertmanagerAPI: opts.AlertmanagerAPI,
LicensingAPI: opts.LicensingAPI,
Signoz: opts.Signoz,
FieldsAPI: opts.FieldsAPI,
QuerierAPI: opts.QuerierAPI,
QueryParserAPI: opts.QueryParserAPI,
}
@@ -399,13 +393,6 @@ func (aH *APIHandler) RegisterQueryRangeV3Routes(router *mux.Router, am *middlew
subRouter.HandleFunc("/logs/livetail", am.ViewAccess(aH.QuerierAPI.QueryRawStream)).Methods(http.MethodGet)
}
func (aH *APIHandler) RegisterFieldsRoutes(router *mux.Router, am *middleware.AuthZ) {
subRouter := router.PathPrefix("/api/v1").Subrouter()
subRouter.HandleFunc("/fields/keys", am.ViewAccess(aH.FieldsAPI.GetFieldsKeys)).Methods(http.MethodGet)
subRouter.HandleFunc("/fields/values", am.ViewAccess(aH.FieldsAPI.GetFieldsValues)).Methods(http.MethodGet)
}
func (aH *APIHandler) RegisterInfraMetricsRoutes(router *mux.Router, am *middleware.AuthZ) {
hostsSubRouter := router.PathPrefix("/api/v1/hosts").Subrouter()
hostsSubRouter.HandleFunc("/attribute_keys", am.ViewAccess(aH.getHostAttributeKeys)).Methods(http.MethodGet)


@@ -17,7 +17,6 @@ import (
"github.com/gorilla/handlers"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
@@ -133,7 +132,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
FluxInterval: config.Querier.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: nooplicensing.NewLicenseAPI(),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
@@ -215,7 +213,6 @@ func (s *Server) createPublicServer(api *APIHandler, web web.Web) (*http.Server,
api.RegisterLogsRoutes(r, am)
api.RegisterIntegrationRoutes(r, am)
api.RegisterCloudIntegrationsRoutes(r, am)
api.RegisterFieldsRoutes(r, am)
api.RegisterQueryRangeV3Routes(r, am)
api.RegisterInfraMetricsRoutes(r, am)
api.RegisterWebSocketPaths(r, am)


@@ -71,7 +71,18 @@ func Parse(filters *v3.FilterSet) (string, error) {
// accommodate log filters like `body.log.message EXISTS` in the expr language,
// where the user is checking whether a key is present in the JSON log body
if strings.HasPrefix(v.Key.Key, "body.") {
filter = fmt.Sprintf("%s %s %s", exprFormattedValue(strings.TrimPrefix(v.Key.Key, "body.")), logOperatorsToExpr[v.Operator], "fromJSON(body)")
// if body is a string containing valid JSON, check whether the key exists in the parsed JSON
filter = fmt.Sprintf(`((type(body) == "string" && isJSON(body)) && %s %s %s)`, exprFormattedValue(strings.TrimPrefix(v.Key.Key, "body.")), logOperatorsToExpr[v.Operator], "fromJSON(body)")
// if body is a map, then check if the key exists in the map
operator := v3.FilterOperatorNotEqual
if v.Operator == v3.FilterOperatorNotExists {
operator = v3.FilterOperatorEqual
}
nilCheckFilter := fmt.Sprintf("%s %s nil", v.Key.Key, logOperatorsToExpr[operator])
// join the two filters with OR
filter = fmt.Sprintf(`(%s or (type(body) == "map" && (%s)))`, filter, nilCheckFilter)
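// For example, `body.log.message EXISTS` now yields the combined expression
// (see the updated test expectations further down in this diff):
//   (((type(body) == "string" && isJSON(body)) && "log.message" in fromJSON(body)) or (type(body) == "map" && (body.log.message != nil)))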
} else if typ := getTypeName(v.Key.Type); typ != "" {
filter = fmt.Sprintf("%s %s %s", exprFormattedValue(v.Key.Key), logOperatorsToExpr[v.Operator], typ)
} else {


@@ -3,203 +3,538 @@ package queryBuilderToExpr
import (
"testing"
signozstanzahelper "github.com/SigNoz/signoz-otel-collector/processor/signozlogspipelineprocessor/stanza/operator/helper"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
. "github.com/smartystreets/goconvey/convey"
"github.com/expr-lang/expr/vm"
"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/entry"
"github.com/stretchr/testify/assert"
)
var testCases = []struct {
Name string
Query *v3.FilterSet
Expr string
ExpectError bool
}{
{
Name: "equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "="},
}},
Expr: `attributes["key"] == "checkbody"`,
},
{
Name: "not equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "!="},
}},
Expr: `attributes["key"] != "checkbody"`,
},
{
Name: "less than",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<"},
}},
Expr: `attributes["key"] != nil && attributes["key"] < 10`,
},
{
Name: "greater than",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">"},
}},
Expr: `attributes["key"] != nil && attributes["key"] > 10`,
},
{
Name: "less than equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
}},
Expr: `attributes["key"] != nil && attributes["key"] <= 10`,
},
{
Name: "greater than equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">="},
}},
Expr: `attributes["key"] != nil && attributes["key"] >= 10`,
},
// case sensitive
{
Name: "body contains",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
}},
Expr: `body != nil && lower(body) contains lower("checkbody")`,
},
{
Name: "body.log.message exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
Expr: `"log.message" in fromJSON(body)`,
},
{
Name: "body.log.message not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
Expr: `"log.message" not in fromJSON(body)`,
},
{
Name: "body ncontains",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "ncontains"},
}},
Expr: `body != nil && lower(body) not contains lower("checkbody")`,
},
{
Name: "body regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "regex"},
}},
Expr: `body != nil && body matches "[0-1]+regex$"`,
},
{
Name: "body not regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
}},
Expr: `body != nil && body not matches "[0-1]+regex$"`,
},
{
Name: "regex with escape characters",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: `^Executing \[\S+@\S+:[0-9]+\] \S+".*`, Operator: "regex"},
}},
Expr: `body != nil && body matches "^Executing \\[\\S+@\\S+:[0-9]+\\] \\S+\".*"`,
},
{
Name: "invalid regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
}},
Expr: `body != nil && lower(body) not matches "[0-9]++"`,
ExpectError: true,
},
{
Name: "in",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []interface{}{1, 2, 3, 4}, Operator: "in"},
}},
Expr: `attributes["key"] != nil && attributes["key"] in [1,2,3,4]`,
},
{
Name: "not in",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []interface{}{"1", "2"}, Operator: "nin"},
}},
Expr: `attributes["key"] != nil && attributes["key"] not in ['1','2']`,
},
{
Name: "exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "exists"},
}},
Expr: `"key" in attributes`,
},
{
Name: "not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
Expr: `"key" not in attributes`,
},
{
Name: "trace_id not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
Expr: `trace_id == nil`,
},
{
Name: "trace_id exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
Expr: `trace_id != nil`,
},
{
Name: "span_id not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
Expr: `span_id == nil`,
},
{
Name: "span_id exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
Expr: `span_id != nil`,
},
{
Name: "Multi filter",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
Expr: `attributes["key"] != nil && attributes["key"] <= 10 and body != nil && body not matches "[0-1]+regex$" and "key" not in attributes`,
},
{
Name: "incorrect multi filter",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
Expr: `attributes["key"] != nil && attributes["key"] <= 10 and body not matches "[0-9]++" and "key" not in attributes`,
ExpectError: true,
},
}
func TestParseExpression(t *testing.T) {
var testCases = []struct {
Name string
Query *v3.FilterSet
Expr string
ExpectError bool
}{
{
Name: "equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "="},
}},
Expr: `attributes["key"] == "checkbody"`,
},
{
Name: "not equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "!="},
}},
Expr: `attributes["key"] != "checkbody"`,
},
{
Name: "less than",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<"},
}},
Expr: `attributes["key"] != nil && attributes["key"] < 10`,
},
{
Name: "greater than",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">"},
}},
Expr: `attributes["key"] != nil && attributes["key"] > 10`,
},
{
Name: "less than equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
}},
Expr: `attributes["key"] != nil && attributes["key"] <= 10`,
},
{
Name: "greater than equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">="},
}},
Expr: `attributes["key"] != nil && attributes["key"] >= 10`,
},
// case sensitive
{
Name: "body contains",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
}},
Expr: `body != nil && lower(body) contains lower("checkbody")`,
},
{
Name: "body.log.message exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
Expr: `(((type(body) == "string" && isJSON(body)) && "log.message" in fromJSON(body)) or (type(body) == "map" && (body.log.message != nil)))`,
},
{
Name: "body.log.message not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
Expr: `(((type(body) == "string" && isJSON(body)) && "log.message" not in fromJSON(body)) or (type(body) == "map" && (body.log.message == nil)))`,
},
{
Name: "body ncontains",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "ncontains"},
}},
Expr: `body != nil && lower(body) not contains lower("checkbody")`,
},
{
Name: "body regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "regex"},
}},
Expr: `body != nil && body matches "[0-1]+regex$"`,
},
{
Name: "body not regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
}},
Expr: `body != nil && body not matches "[0-1]+regex$"`,
},
{
Name: "regex with escape characters",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: `^Executing \[\S+@\S+:[0-9]+\] \S+".*`, Operator: "regex"},
}},
Expr: `body != nil && body matches "^Executing \\[\\S+@\\S+:[0-9]+\\] \\S+\".*"`,
},
{
Name: "invalid regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
}},
Expr: `body != nil && lower(body) not matches "[0-9]++"`,
ExpectError: true,
},
{
Name: "in",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{1, 2, 3, 4}, Operator: "in"},
}},
Expr: `attributes["key"] != nil && attributes["key"] in [1,2,3,4]`,
},
{
Name: "not in",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{"1", "2"}, Operator: "nin"},
}},
Expr: `attributes["key"] != nil && attributes["key"] not in ['1','2']`,
},
{
Name: "exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "exists"},
}},
Expr: `"key" in attributes`,
},
{
Name: "not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
Expr: `"key" not in attributes`,
},
{
Name: "trace_id not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
Expr: `trace_id == nil`,
},
{
Name: "trace_id exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
Expr: `trace_id != nil`,
},
{
Name: "span_id not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
Expr: `span_id == nil`,
},
{
Name: "span_id exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
Expr: `span_id != nil`,
},
{
Name: "Multi filter",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
Expr: `attributes["key"] != nil && attributes["key"] <= 10 and body != nil && body not matches "[0-1]+regex$" and "key" not in attributes`,
},
{
Name: "incorrect multi filter",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
Expr: `attributes["key"] != nil && attributes["key"] <= 10 and body not matches "[0-9]++" and "key" not in attributes`,
ExpectError: true,
},
}
func TestParse(t *testing.T) {
for _, tt := range testCases {
Convey(tt.Name, t, func() {
t.Run(tt.Name, func(t *testing.T) {
x, err := Parse(tt.Query)
if tt.ExpectError {
So(err, ShouldNotBeNil)
assert.Error(t, err)
} else {
So(err, ShouldBeNil)
So(x, ShouldEqual, tt.Expr)
assert.NoError(t, err)
assert.Equal(t, tt.Expr, x)
}
})
}
}
type EntryComposite struct {
ID int
*entry.Entry
}
// makeEntry creates an EntryComposite for tests. Pass nil for traceID/spanID to mean "not set".
func makeEntry(id int, body any, attributes, resource map[string]any, traceID, spanID []byte) EntryComposite {
e := entry.New()
e.Body = body
if attributes != nil {
e.Attributes = attributes
} else {
e.Attributes = make(map[string]any)
}
if resource != nil {
e.Resource = resource
} else {
e.Resource = make(map[string]any)
}
if traceID != nil {
e.TraceID = traceID
}
if spanID != nil {
e.SpanID = spanID
}
return EntryComposite{ID: id, Entry: e}
}
func TestExpressionVSEntry(t *testing.T) {
// Dataset: entries with varied bodies (JSON strings, plain text, and maps), attributes, trace_id, and span_id for filter testing.
// IDs 0..12: JSON string bodies (body.msg / body.log etc. work). IDs 13..17: plain text log bodies. IDs 18..20: map bodies.
dataset := []EntryComposite{
// JSON body entries (0-12)
makeEntry(0, `{"msg":"hello world"}`, map[string]any{"level": "info"}, map[string]any{"env": "prod", "host": "node-0"}, nil, nil),
makeEntry(1, `{"msg":"error occurred", "missing": "value"}`, map[string]any{"level": "error"}, map[string]any{"env": "prod", "host": "node-1"}, []byte("trace1"), []byte("span1")),
makeEntry(2, `{"msg":"checkbody substring"}`, map[string]any{"level": "info"}, map[string]any{"env": "staging", "host": "node-2"}, []byte("trace2"), nil),
makeEntry(3, `{"msg":"no match here"}`, map[string]any{"level": "debug"}, map[string]any{"env": "staging", "host": "node-3"}, nil, []byte("span3")),
makeEntry(4, `{"msg":"101regex suffix"}`, map[string]any{"code": "200", "count": int64(5)}, map[string]any{"env": "prod", "host": "node-4"}, nil, nil),
makeEntry(5, `{"msg":"plain text only"}`, map[string]any{"code": "404", "count": int64(10)}, map[string]any{"env": "prod", "host": "node-5"}, []byte("trace5"), []byte("span5")),
makeEntry(6, `{"log":{"message":"user login"}}`, map[string]any{"service": "auth"}, map[string]any{"env": "dev", "host": "node-6"}, nil, nil),
makeEntry(7, `{"log":{"message":"user logout"}}`, map[string]any{"service": "auth", "user_id": "u1"}, map[string]any{"env": "dev", "host": "node-7"}, []byte("trace7"), nil),
makeEntry(8, `{"event":"click"}`, map[string]any{"service": "api"}, map[string]any{"env": "dev", "host": "node-8"}, nil, nil),
makeEntry(9, `{"msg":"checkbody"}`, map[string]any{"tag": "exact", "num": int64(9)}, map[string]any{"env": "prod", "host": "node-9"}, nil, nil),
makeEntry(10, `{"msg":"CHECKBODY case"}`, map[string]any{"tag": "case", "num": int64(10)}, map[string]any{"env": "staging", "host": "node-10"}, nil, nil),
makeEntry(11, `{"msg":"foo"}`, map[string]any{"status": "active", "score": int64(100)}, map[string]any{"env": "prod", "host": "node-11"}, nil, nil),
makeEntry(12, `{"msg":"bar"}`, map[string]any{"status": "inactive", "score": int64(50)}, map[string]any{"env": "staging", "host": "node-12"}, []byte("trace12"), []byte("span12")),
// Plain text log body entries (13-17)
makeEntry(13, "Server started on port 8080", map[string]any{"component": "server"}, map[string]any{"env": "prod", "host": "node-13"}, nil, nil),
makeEntry(14, "Connection refused to 10.0.0.1:5432", map[string]any{"level": "error"}, map[string]any{"env": "prod", "host": "node-14"}, nil, nil),
makeEntry(15, "User login failed for admin", map[string]any{"service": "auth", "level": "warn"}, map[string]any{"env": "dev", "host": "node-15"}, []byte("trace15"), nil),
makeEntry(16, "checkbody in text log", map[string]any{"level": "info"}, map[string]any{"env": "staging", "host": "node-16"}, nil, nil),
makeEntry(17, "WARN: disk full on /var", map[string]any{"level": "warn"}, map[string]any{"env": "prod", "host": "node-17"}, nil, []byte("span17")),
// Body as map (not string) entries (18-20)
makeEntry(18, map[string]any{"msg": "checkbody substring", "level": "info"}, map[string]any{"source": "map"}, map[string]any{"env": "prod", "host": "node-18"}, nil, nil),
makeEntry(19, map[string]any{"log": map[string]any{"message": "nested value in map body"}, "missing": true}, map[string]any{"source": "map"}, map[string]any{"env": "staging", "host": "node-19"}, []byte("trace19"), nil),
makeEntry(20, map[string]any{"event": "deploy", "version": "1.2.0"}, map[string]any{"source": "map", "level": "info"}, map[string]any{"env": "dev", "host": "node-20"}, nil, []byte("span20")),
}
var testCases = []struct {
Name string
Query *v3.FilterSet
ExpectedMatches []int
}{
{
Name: "resource equal (env)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
}},
ExpectedMatches: []int{0, 1, 4, 5, 9, 11, 13, 14, 17, 18},
},
{
Name: "resource not equal (env)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "!="},
}},
ExpectedMatches: []int{2, 3, 6, 7, 8, 10, 12, 15, 16, 19, 20},
},
{
Name: "attribute less than (numeric)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "count", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 8, Operator: "<"},
}},
ExpectedMatches: []int{4},
},
{
Name: "attribute greater than (numeric)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "count", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 8, Operator: ">"},
}},
ExpectedMatches: []int{5},
},
{
Name: "body contains (case insensitive)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
}},
ExpectedMatches: []int{2, 9, 10, 16},
},
{
Name: "body ncontains",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "ncontains"},
}},
ExpectedMatches: []int{0, 1, 3, 4, 5, 6, 7, 8, 11, 12, 13, 14, 15, 17},
},
{
Name: "body.msg (case insensitive)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body.msg", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: false}, Value: "checkbody", Operator: "contains"},
}},
ExpectedMatches: []int{2, 9, 10, 18},
},
{
Name: "body regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex", Operator: "regex"},
}},
ExpectedMatches: []int{4},
},
{
Name: "body not regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex", Operator: "nregex"},
}},
ExpectedMatches: []int{0, 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17},
},
// body.log.message exists/nexists: expr checks "log.message" in fromJSON(body); nested key
// semantics depend on signoz stanza helper. Omitted here to avoid coupling to env shape.
{
Name: "body top-level key exists (body.msg)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body.msg", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
ExpectedMatches: []int{0, 1, 2, 3, 4, 5, 9, 10, 11, 12, 18},
},
{
Name: "body top-level key not exists (body.missing)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body.missing", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
ExpectedMatches: []int{0, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 18, 20},
},
{
Name: "attribute exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "exists"},
}},
ExpectedMatches: []int{6, 7, 8, 15},
},
{
Name: "attribute not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
ExpectedMatches: []int{0, 1, 2, 3, 4, 5, 9, 10, 11, 12, 13, 14, 16, 17, 18, 19, 20},
},
{
Name: "trace_id exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
ExpectedMatches: []int{1, 2, 5, 7, 12, 15, 19},
},
{
Name: "trace_id not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
ExpectedMatches: []int{0, 3, 4, 6, 8, 9, 10, 11, 13, 14, 16, 17, 18, 20},
},
{
Name: "span_id exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
ExpectedMatches: []int{1, 3, 5, 12, 17, 20},
},
{
Name: "span_id not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
ExpectedMatches: []int{0, 2, 4, 6, 7, 8, 9, 10, 11, 13, 14, 15, 16, 18, 19},
},
{
Name: "in (attribute in list)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{"info", "error"}, Operator: "in"},
}},
ExpectedMatches: []int{0, 1, 2, 14, 16, 20},
},
{
Name: "not in (attribute not in list)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{"error", "warn"}, Operator: "nin"},
}},
ExpectedMatches: []int{0, 2, 3, 16, 20},
},
{
Name: "multi filter AND",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "staging", Operator: "="},
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "info", Operator: "="},
}},
ExpectedMatches: []int{2, 16},
},
{
Name: "multi filter AND (two attributes)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "auth", Operator: "="},
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
ExpectedMatches: []int{6, 7},
},
// Multi-filter variations: body + attribute, three conditions, trace/span + attribute
{
Name: "multi filter AND body contains + attribute",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "Connection", Operator: "contains"},
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
}},
ExpectedMatches: []int{14},
},
{
Name: "multi filter AND body contains + service",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "login", Operator: "contains"},
{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "auth", Operator: "="},
}},
ExpectedMatches: []int{6, 15},
},
{
Name: "multi filter AND env + level (prod error)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "error", Operator: "="},
}},
ExpectedMatches: []int{1, 14},
},
{
Name: "multi filter AND three conditions (staging + checkbody + info)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "staging", Operator: "="},
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "info", Operator: "="},
}},
ExpectedMatches: []int{2, 16},
},
{
Name: "multi filter AND trace_id exists + body contains",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
}},
ExpectedMatches: []int{2},
},
{
Name: "multi filter AND span_id nexists + service auth",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "auth", Operator: "="},
}},
ExpectedMatches: []int{6, 7, 15},
},
{
Name: "multi filter AND body regex + attribute",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex", Operator: "regex"},
{Key: v3.AttributeKey{Key: "code", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "200", Operator: "="},
}},
ExpectedMatches: []int{4},
},
{
Name: "multi filter AND no trace_id + no span_id + env prod",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
}},
ExpectedMatches: []int{0, 4, 9, 11, 13, 14, 18},
},
{
Name: "multi filter AND level warn + body contains",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "warn", Operator: "="},
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "disk", Operator: "contains"},
}},
ExpectedMatches: []int{17},
},
{
Name: "no matches (attribute value not present)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "never", Operator: "="},
}},
ExpectedMatches: []int{},
},
{
Name: "attribute equal and trace_id exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "code", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "404", Operator: "="},
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
ExpectedMatches: []int{5},
},
}
for _, tt := range testCases {
t.Run(tt.Name, func(t *testing.T) {
expression, err := Parse(tt.Query)
assert.NoError(t, err)
compiled, hasBodyFieldRef, err := signozstanzahelper.ExprCompileBool(expression)
assert.NoError(t, err)
matchedIDs := []int{}
for _, d := range dataset {
env := signozstanzahelper.GetExprEnv(d.Entry, hasBodyFieldRef)
matches, err := vm.Run(compiled, env)
signozstanzahelper.PutExprEnv(env)
if err != nil {
// Eval error (e.g. fromJSON on non-JSON body) => treat as no match
continue
}
if matches != nil && matches.(bool) {
matchedIDs = append(matchedIDs, d.ID)
}
}
assert.Equal(t, tt.ExpectedMatches, matchedIDs, "query %q", tt.Name)
})
}
}


@@ -11,6 +11,8 @@ import (
"github.com/SigNoz/signoz/pkg/modules/apdex/implapdex"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/fields/implfields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer/implmetricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/quickfilter"
@@ -28,6 +30,7 @@ import (
"github.com/SigNoz/signoz/pkg/modules/tracefunnel"
"github.com/SigNoz/signoz/pkg/modules/tracefunnel/impltracefunnel"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type Handlers struct {
@@ -44,9 +47,19 @@ type Handlers struct {
FlaggerHandler flagger.Handler
GatewayHandler gateway.Handler
Role role.Handler
Fields fields.Handler
}
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing, global global.Global, flaggerService flagger.Flagger, gatewayService gateway.Gateway) Handlers {
func NewHandlers(
modules Modules,
providerSettings factory.ProviderSettings,
querier querier.Querier,
licensing licensing.Licensing,
global global.Global,
flaggerService flagger.Flagger,
gatewayService gateway.Gateway,
telemetryMetadataStore telemetrytypes.MetadataStore,
) Handlers {
return Handlers{
SavedView: implsavedview.NewHandler(modules.SavedView),
Apdex: implapdex.NewHandler(modules.Apdex),
@@ -61,5 +74,6 @@ func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, que
FlaggerHandler: flagger.NewHandler(flaggerService),
GatewayHandler: gateway.NewHandler(gatewayService),
Role: implrole.NewHandler(modules.RoleSetter, modules.RoleGetter),
Fields: implfields.NewHandler(providerSettings, telemetryMetadataStore),
}
}


@@ -46,7 +46,7 @@ func TestNewHandlers(t *testing.T) {
grantModule := implrole.NewGranter(implrole.NewStore(sqlstore), nil)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, roleSetter, roleGetter, grantModule)
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil)
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil, nil)
reflectVal := reflect.ValueOf(handlers)
for i := 0; i < reflectVal.NumField(); i++ {


@@ -2,6 +2,7 @@ package signoz
import (
"context"
"net/http"
"os"
"reflect"
@@ -15,6 +16,7 @@ import (
"github.com/SigNoz/signoz/pkg/instrumentation"
"github.com/SigNoz/signoz/pkg/modules/authdomain"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/preference"
@@ -23,6 +25,8 @@ import (
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/gorilla/mux"
"github.com/swaggest/jsonschema-go"
"github.com/swaggest/openapi-go"
"github.com/swaggest/openapi-go/openapi3"
@@ -52,11 +56,16 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
struct{ gateway.Handler }{},
struct{ role.Getter }{},
struct{ role.Handler }{},
struct{ fields.Handler }{},
).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
if err != nil {
return nil, err
}
// Register routes that live outside the APIServer modules
// so they are discovered by the OpenAPI walker.
registerQueryRoutes(apiserver.Router())
reflector := openapi3.NewReflector()
reflector.JSONSchemaReflector().DefaultOptions = append(reflector.JSONSchemaReflector().DefaultOptions, jsonschema.InterceptDefName(func(t reflect.Type, defaultDefName string) string {
if defaultDefName == "RenderSuccessResponse" {
@@ -97,3 +106,25 @@ func (openapi *OpenAPI) CreateAndWrite(path string) error {
return os.WriteFile(path, spec, 0o600)
}
func registerQueryRoutes(router *mux.Router) {
router.Handle("/api/v5/query_range", handler.New(
func(http.ResponseWriter, *http.Request) {},
handler.OpenAPIDef{
ID: "QueryRangeV5",
Tags: []string{"query"},
Summary: "Query range",
Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.",
Request: new(qbtypes.QueryRangeRequest),
RequestContentType: "application/json",
Response: new(qbtypes.QueryRangeResponse),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest},
SecuritySchemes: []handler.OpenAPISecurityScheme{
{Name: ctxtypes.AuthTypeAPIKey.StringValue(), Scopes: []string{"VIEWER"}},
{Name: ctxtypes.AuthTypeTokenizer.StringValue(), Scopes: []string{"VIEWER"}},
},
},
)).Methods(http.MethodPost)
}


@@ -249,6 +249,7 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
handlers.GatewayHandler,
modules.RoleGetter,
handlers.Role,
handlers.Fields,
),
)
}


@@ -396,7 +396,7 @@ func New(
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard, roleSetter, roleGetter, granter)
// Initialize all handlers for the modules
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway)
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway, telemetryMetadataStore)
// Initialize the API server
apiserver, err := factory.NewProviderFromNamedMap(


@@ -36,8 +36,8 @@ type ChangePasswordRequest struct {
}
type PostableForgotPassword struct {
OrgID valuer.UUID `json:"orgId"`
Email valuer.Email `json:"email"`
OrgID valuer.UUID `json:"orgId" required:"true"`
Email valuer.Email `json:"email" required:"true"`
FrontendBaseURL string `json:"frontendBaseURL"`
}


@@ -33,8 +33,8 @@ type LimitConfig struct {
}
type LimitValue struct {
Size int64 `json:"size,omitempty"`
Count int64 `json:"count,omitempty"`
Size int64 `json:"size"`
Count int64 `json:"count"`
}
type LimitMetric struct {


@@ -31,12 +31,19 @@ var (
TreemapModeSamples = TreemapMode{valuer.NewString("samples")}
)
func (TreemapMode) Enum() []any {
return []any{
TreemapModeTimeSeries,
TreemapModeSamples,
}
}
// StatsRequest represents the payload accepted by the metrics stats endpoint.
type StatsRequest struct {
Filter *qbtypes.Filter `json:"filter,omitempty"`
Start int64 `json:"start"`
End int64 `json:"end"`
Limit int `json:"limit"`
Start int64 `json:"start" required:"true"`
End int64 `json:"end" required:"true"`
Limit int `json:"limit" required:"true"`
Offset int `json:"offset"`
OrderBy *qbtypes.OrderBy `json:"orderBy,omitempty"`
}
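// Illustrative example payload; timestamps are assumed to be epoch milliseconds,
// matching the query_range examples later in this diff. Only start, end and limit
// are marked required:
//   {"start": 1640995200000, "end": 1640998800000, "limit": 50}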
@@ -96,26 +103,26 @@ func (req *StatsRequest) UnmarshalJSON(data []byte) error {
// Stat represents the summary information returned per metric.
type Stat struct {
MetricName string `json:"metricName"`
Description string `json:"description"`
MetricType metrictypes.Type `json:"type"`
MetricUnit string `json:"unit"`
TimeSeries uint64 `json:"timeseries"`
Samples uint64 `json:"samples"`
MetricName string `json:"metricName" required:"true"`
Description string `json:"description" required:"true"`
MetricType metrictypes.Type `json:"type" required:"true"`
MetricUnit string `json:"unit" required:"true"`
TimeSeries uint64 `json:"timeseries" required:"true"`
Samples uint64 `json:"samples" required:"true"`
}
// StatsResponse represents the aggregated metrics statistics.
type StatsResponse struct {
Metrics []Stat `json:"metrics"`
Total uint64 `json:"total"`
Metrics []Stat `json:"metrics" required:"true" nullable:"true"`
Total uint64 `json:"total" required:"true"`
}
type MetricMetadata struct {
Description string `json:"description"`
MetricType metrictypes.Type `json:"type"`
MetricUnit string `json:"unit"`
Temporality metrictypes.Temporality `json:"temporality"`
IsMonotonic bool `json:"isMonotonic"`
Description string `json:"description" required:"true"`
MetricType metrictypes.Type `json:"type" required:"true"`
MetricUnit string `json:"unit" required:"true"`
Temporality metrictypes.Temporality `json:"temporality" required:"true"`
IsMonotonic bool `json:"isMonotonic" required:"true"`
}
// MarshalBinary implements cachetypes.Cacheable interface
@@ -130,21 +137,21 @@ func (m *MetricMetadata) UnmarshalBinary(data []byte) error {
// UpdateMetricMetadataRequest represents the payload for updating metric metadata.
type UpdateMetricMetadataRequest struct {
MetricName string `json:"metricName"`
Type metrictypes.Type `json:"type"`
Description string `json:"description"`
Unit string `json:"unit"`
Temporality metrictypes.Temporality `json:"temporality"`
IsMonotonic bool `json:"isMonotonic"`
MetricName string `json:"metricName" required:"true"`
Type metrictypes.Type `json:"type" required:"true"`
Description string `json:"description" required:"true"`
Unit string `json:"unit" required:"true"`
Temporality metrictypes.Temporality `json:"temporality" required:"true"`
IsMonotonic bool `json:"isMonotonic" required:"true"`
}
// TreemapRequest represents the payload for the metrics treemap endpoint.
type TreemapRequest struct {
Filter *qbtypes.Filter `json:"filter,omitempty"`
Start int64 `json:"start"`
End int64 `json:"end"`
Limit int `json:"limit"`
Mode TreemapMode `json:"mode"`
Start int64 `json:"start" required:"true"`
End int64 `json:"end" required:"true"`
Limit int `json:"limit" required:"true"`
Mode TreemapMode `json:"mode" required:"true"`
}
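// Illustrative example payload (same timestamp assumption as StatsRequest above);
// "samples" is one of the TreemapMode values declared earlier in this file:
//   {"start": 1640995200000, "end": 1640998800000, "limit": 10, "mode": "samples"}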
// Validate enforces basic constraints on TreemapRequest.
@@ -210,52 +217,52 @@ func (req *TreemapRequest) UnmarshalJSON(data []byte) error {
// TreemapEntry represents each node in the treemap response.
type TreemapEntry struct {
MetricName string `json:"metricName"`
Percentage float64 `json:"percentage"`
TotalValue uint64 `json:"totalValue"`
MetricName string `json:"metricName" required:"true"`
Percentage float64 `json:"percentage" required:"true"`
TotalValue uint64 `json:"totalValue" required:"true"`
}
// TreemapResponse is the output structure for the treemap endpoint.
type TreemapResponse struct {
TimeSeries []TreemapEntry `json:"timeseries"`
Samples []TreemapEntry `json:"samples"`
TimeSeries []TreemapEntry `json:"timeseries" required:"true" nullable:"true"`
Samples []TreemapEntry `json:"samples" required:"true" nullable:"true"`
}
// MetricAlert represents an alert associated with a metric.
type MetricAlert struct {
AlertName string `json:"alertName"`
AlertID string `json:"alertId"`
AlertName string `json:"alertName" required:"true"`
AlertID string `json:"alertId" required:"true"`
}
// MetricAlertsResponse represents the response for metric alerts endpoint.
type MetricAlertsResponse struct {
Alerts []MetricAlert `json:"alerts"`
Alerts []MetricAlert `json:"alerts" required:"true" nullable:"true"`
}
// MetricDashboard represents a dashboard/widget referencing a metric.
type MetricDashboard struct {
DashboardName string `json:"dashboardName"`
DashboardID string `json:"dashboardId"`
WidgetID string `json:"widgetId"`
WidgetName string `json:"widgetName"`
DashboardName string `json:"dashboardName" required:"true"`
DashboardID string `json:"dashboardId" required:"true"`
WidgetID string `json:"widgetId" required:"true"`
WidgetName string `json:"widgetName" required:"true"`
}
// MetricDashboardsResponse represents the response for metric dashboards endpoint.
type MetricDashboardsResponse struct {
Dashboards []MetricDashboard `json:"dashboards"`
Dashboards []MetricDashboard `json:"dashboards" required:"true" nullable:"true"`
}
// MetricHighlightsResponse is the output structure for the metric highlights endpoint.
type MetricHighlightsResponse struct {
DataPoints uint64 `json:"dataPoints"`
LastReceived uint64 `json:"lastReceived"`
TotalTimeSeries uint64 `json:"totalTimeSeries"`
ActiveTimeSeries uint64 `json:"activeTimeSeries"`
DataPoints uint64 `json:"dataPoints" required:"true"`
LastReceived uint64 `json:"lastReceived" required:"true"`
TotalTimeSeries uint64 `json:"totalTimeSeries" required:"true"`
ActiveTimeSeries uint64 `json:"activeTimeSeries" required:"true"`
}
// MetricAttributesRequest represents the payload for the metric attributes endpoint.
type MetricAttributesRequest struct {
MetricName string `json:"metricName"`
MetricName string `json:"metricName" required:"true"`
Start *int64 `json:"start,omitempty"`
End *int64 `json:"end,omitempty"`
}
@@ -292,17 +299,17 @@ func (req *MetricAttributesRequest) UnmarshalJSON(data []byte) error {
// MetricAttribute represents a single attribute with its values and count.
type MetricAttribute struct {
Key string `json:"key"`
Values []string `json:"values"`
ValueCount uint64 `json:"valueCount"`
Key string `json:"key" required:"true"`
Values []string `json:"values" required:"true" nullable:"true"`
ValueCount uint64 `json:"valueCount" required:"true"`
}
// MetricAttributesResponse is the output structure for the metric attributes endpoint.
type MetricAttributesResponse struct {
Attributes []MetricAttribute `json:"attributes"`
TotalKeys int64 `json:"totalKeys"`
Attributes []MetricAttribute `json:"attributes" required:"true" nullable:"true"`
TotalKeys int64 `json:"totalKeys" required:"true"`
}
type MetricNameParams struct {
MetricName string `query:"metricName"`
MetricName string `query:"metricName" required:"true"`
}


@@ -36,7 +36,7 @@ func (t Temporality) Value() (driver.Value, error) {
}
}
func (t *Temporality) Scan(src interface{}) error {
func (t *Temporality) Scan(src any) error {
if src == nil {
*t = Unknown
return nil
@@ -66,6 +66,14 @@ func (t *Temporality) Scan(src interface{}) error {
return nil
}
func (Temporality) Enum() []any {
return []any{
Delta,
Cumulative,
Unspecified,
}
}
// Type is the type of the metric in OTLP data model
// Read more here https://opentelemetry.io/docs/specs/otel/metrics/data-model/#metric-points
type Type struct {
@@ -130,10 +138,20 @@ var (
SumType = Type{valuer.NewString("sum")}
HistogramType = Type{valuer.NewString("histogram")}
SummaryType = Type{valuer.NewString("summary")}
ExpHistogramType = Type{valuer.NewString("exponential_histogram")}
ExpHistogramType = Type{valuer.NewString("exponentialhistogram")}
UnspecifiedType = Type{valuer.NewString("")}
)
func (Type) Enum() []any {
return []any{
GaugeType,
SumType,
HistogramType,
SummaryType,
ExpHistogramType,
}
}
type TimeAggregation struct {
valuer.String
}
@@ -151,6 +169,21 @@ var (
TimeAggregationIncrease = TimeAggregation{valuer.NewString("increase")}
)
func (TimeAggregation) Enum() []any {
return []any{
TimeAggregationUnspecified,
TimeAggregationLatest,
TimeAggregationSum,
TimeAggregationAvg,
TimeAggregationMin,
TimeAggregationMax,
TimeAggregationCount,
TimeAggregationCountDistinct,
TimeAggregationRate,
TimeAggregationIncrease,
}
}
type SpaceAggregation struct {
valuer.String
}
@@ -169,6 +202,22 @@ var (
SpaceAggregationPercentile99 = SpaceAggregation{valuer.NewString("p99")}
)
func (SpaceAggregation) Enum() []any {
return []any{
SpaceAggregationUnspecified,
SpaceAggregationSum,
SpaceAggregationAvg,
SpaceAggregationMin,
SpaceAggregationMax,
SpaceAggregationCount,
SpaceAggregationPercentile50,
SpaceAggregationPercentile75,
SpaceAggregationPercentile90,
SpaceAggregationPercentile95,
SpaceAggregationPercentile99,
}
}
func (s SpaceAggregation) IsPercentile() bool {
return s == SpaceAggregationPercentile50 ||
s == SpaceAggregationPercentile75 ||


@@ -0,0 +1,707 @@
package querybuildertypesv5
import (
"github.com/swaggest/jsonschema-go"
)
// Enum returns the acceptable values for QueryType.
func (QueryType) Enum() []any {
return []any{
QueryTypeBuilder,
QueryTypeFormula,
// Not yet supported.
// QueryTypeSubQuery,
// QueryTypeJoin,
QueryTypeTraceOperator,
QueryTypeClickHouseSQL,
QueryTypePromQL,
}
}
// Enum returns the acceptable values for RequestType.
func (RequestType) Enum() []any {
return []any{
RequestTypeScalar,
RequestTypeTimeSeries,
RequestTypeRaw,
RequestTypeRawStream,
RequestTypeTrace,
// RequestTypeDistribution,
}
}
// Enum returns the acceptable values for FunctionName.
func (FunctionName) Enum() []any {
return []any{
FunctionNameCutOffMin,
FunctionNameCutOffMax,
FunctionNameClampMin,
FunctionNameClampMax,
FunctionNameAbsolute,
FunctionNameRunningDiff,
FunctionNameLog2,
FunctionNameLog10,
FunctionNameCumulativeSum,
FunctionNameEWMA3,
FunctionNameEWMA5,
FunctionNameEWMA7,
FunctionNameMedian3,
FunctionNameMedian5,
FunctionNameMedian7,
FunctionNameTimeShift,
FunctionNameAnomaly,
FunctionNameFillZero,
}
}
// Enum returns the acceptable values for OrderDirection.
func (OrderDirection) Enum() []any {
return []any{
OrderDirectionAsc,
OrderDirectionDesc,
}
}
// Enum returns the acceptable values for ReduceTo.
func (ReduceTo) Enum() []any {
return []any{
ReduceToSum,
ReduceToCount,
ReduceToAvg,
ReduceToMin,
ReduceToMax,
ReduceToLast,
ReduceToMedian,
}
}
// Enum returns the acceptable values for VariableType.
func (VariableType) Enum() []any {
return []any{
QueryVariableType,
DynamicVariableType,
CustomVariableType,
TextBoxVariableType,
}
}
// Enum returns the acceptable values for JoinType.
func (JoinType) Enum() []any {
return []any{
JoinTypeInner,
JoinTypeLeft,
JoinTypeRight,
JoinTypeFull,
JoinTypeCross,
}
}
// Enum returns the acceptable values for ColumnType.
func (ColumnType) Enum() []any {
return []any{
ColumnTypeGroup,
ColumnTypeAggregation,
}
}
// queryEnvelopeBuilderTrace is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=traces.
type queryEnvelopeBuilderTrace struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderQuery[TraceAggregation] `json:"spec" description:"The trace builder query specification."`
}
// queryEnvelopeBuilderLog is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=logs.
type queryEnvelopeBuilderLog struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderQuery[LogAggregation] `json:"spec" description:"The log builder query specification."`
}
// queryEnvelopeBuilderMetric is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=metrics.
type queryEnvelopeBuilderMetric struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderQuery[MetricAggregation] `json:"spec" description:"The metric builder query specification."`
}
// queryEnvelopeFormula is the OpenAPI schema for a QueryEnvelope with type=builder_formula.
type queryEnvelopeFormula struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderFormula `json:"spec" description:"The formula specification."`
}
// queryEnvelopeJoin is the OpenAPI schema for a QueryEnvelope with type=builder_join.
// type queryEnvelopeJoin struct {
// Type QueryType `json:"type" description:"The type of the query."`
// Spec QueryBuilderJoin `json:"spec" description:"The join specification."`
// }
// queryEnvelopeTraceOperator is the OpenAPI schema for a QueryEnvelope with type=builder_trace_operator.
type queryEnvelopeTraceOperator struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderTraceOperator `json:"spec" description:"The trace operator specification."`
}
// queryEnvelopePromQL is the OpenAPI schema for a QueryEnvelope with type=promql.
type queryEnvelopePromQL struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec PromQuery `json:"spec" description:"The PromQL query specification."`
}
// queryEnvelopeClickHouseSQL is the OpenAPI schema for a QueryEnvelope with type=clickhouse_sql.
type queryEnvelopeClickHouseSQL struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec ClickHouseQuery `json:"spec" description:"The ClickHouse SQL query specification."`
}
var _ jsonschema.OneOfExposer = QueryEnvelope{}
// JSONSchemaOneOf returns the oneOf variants for the QueryEnvelope discriminated union.
// Each variant represents a different query type with its corresponding spec schema.
func (QueryEnvelope) JSONSchemaOneOf() []any {
return []any{
queryEnvelopeBuilderTrace{},
queryEnvelopeBuilderLog{},
queryEnvelopeBuilderMetric{},
queryEnvelopeFormula{},
// queryEnvelopeJoin{},
queryEnvelopeTraceOperator{},
queryEnvelopePromQL{},
queryEnvelopeClickHouseSQL{},
}
}
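// Illustrative only (not part of this change): the "type" field acts as the
// discriminator that selects which spec schema applies. For example, an envelope
// such as
//
//	{"type": "promql", "spec": {"name": "A", "query": "up", "step": 60}}
//
// is expected to validate against the queryEnvelopePromQL variant above, while
// "type": "builder_query" selects one of the builder variants based on its signal.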
var _ jsonschema.Exposer = Step{}
// JSONSchema returns a custom schema for Step that accepts either a duration string or a number (seconds).
func (Step) JSONSchema() (jsonschema.Schema, error) {
s := jsonschema.Schema{}
s.WithDescription("Step interval. Accepts a Go duration string (e.g., \"60s\", \"1m\", \"1h\") or a number representing seconds (e.g., 60).")
strSchema := jsonschema.Schema{}
strSchema.WithType(jsonschema.String.Type())
strSchema.WithExamples("60s", "5m", "1h")
strSchema.WithDescription("Duration string (e.g., \"60s\", \"5m\", \"1h\").")
numSchema := jsonschema.Schema{}
numSchema.WithType(jsonschema.Number.Type())
numSchema.WithExamples(60, 300, 3600)
numSchema.WithDescription("Duration in seconds.")
s.OneOf = []jsonschema.SchemaOrBool{
strSchema.ToSchemaOrBool(),
numSchema.ToSchemaOrBool(),
}
return s, nil
}
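// Illustrative only (not part of this change): per the schema above, both of the
// following request fragments are accepted and describe the same step:
//
//	"stepInterval": "60s"
//	"stepInterval": 60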
var _ jsonschema.OneOfExposer = QueryData{}
// JSONSchemaOneOf documents the polymorphic result types in QueryData.Results.
func (QueryData) JSONSchemaOneOf() []any {
return []any{
TimeSeriesData{},
ScalarData{},
RawData{},
}
}
var _ jsonschema.Preparer = &QueryRangeRequest{}
// PrepareJSONSchema adds examples and description to the QueryRangeRequest schema.
func (q *QueryRangeRequest) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Request body for the v5 query range endpoint. Supports builder queries (traces, logs, metrics), formulas, joins, trace operators, PromQL, and ClickHouse SQL queries.")
schema.WithExamples(
// 1. time_series + traces builder: count spans grouped by service, ordered by count
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{
"expression": "count()",
"alias": "span_count",
},
},
"stepInterval": "60s",
"filter": map[string]any{
"expression": "service.name = 'frontend'",
},
"groupBy": []any{
map[string]any{
"name": "service.name",
"fieldContext": "resource",
},
},
"order": []any{
map[string]any{
"key": map[string]any{"name": "span_count"},
"direction": "desc",
},
},
"limit": 10,
},
},
},
},
},
// 2. time_series + logs builder: count logs grouped by service
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{
"expression": "count()",
"alias": "log_count",
},
},
"stepInterval": "60s",
"filter": map[string]any{
"expression": "severity_text = 'ERROR'",
},
"groupBy": []any{
map[string]any{
"name": "service.name",
"fieldContext": "resource",
},
},
"order": []any{
map[string]any{
"key": map[string]any{"name": "log_count"},
"direction": "desc",
},
},
"limit": 10,
},
},
},
},
},
// 3. time_series + metrics builder (Gauge): latest value averaged across series
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{
"metricName": "system.cpu.utilization",
"timeAggregation": "latest",
"spaceAggregation": "avg",
},
},
"stepInterval": "60s",
"groupBy": []any{
map[string]any{
"name": "host.name",
"fieldContext": "resource",
},
},
},
},
},
},
},
// 4. time_series + metrics builder (Sum): rate of cumulative counter
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{
"metricName": "http.server.duration.count",
"timeAggregation": "rate",
"spaceAggregation": "sum",
},
},
"stepInterval": 120,
"groupBy": []any{
map[string]any{
"name": "service.name",
"fieldContext": "resource",
},
},
},
},
},
},
},
// 5. time_series + metrics builder (Histogram): p99 latency
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{
"metricName": "http.server.duration.bucket",
"spaceAggregation": "p99",
},
},
"stepInterval": "60s",
"groupBy": []any{
map[string]any{
"name": "service.name",
"fieldContext": "resource",
},
},
},
},
},
},
},
// 6. raw + logs builder: fetch raw log records
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"filter": map[string]any{
"expression": "severity_text = 'ERROR'",
},
"selectFields": []any{
map[string]any{
"name": "body",
"fieldContext": "log",
},
map[string]any{
"name": "service.name",
"fieldContext": "resource",
},
},
"order": []any{
map[string]any{
"key": map[string]any{"name": "timestamp", "fieldContext": "log"},
"direction": "desc",
},
map[string]any{
"key": map[string]any{"name": "id"},
"direction": "desc",
},
},
"limit": 50,
"offset": 0,
},
},
},
},
},
// 7. raw + traces builder: fetch raw span records
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"filter": map[string]any{
"expression": "service.name = 'frontend' AND has_error = true",
},
"selectFields": []any{
map[string]any{
"name": "name",
"fieldContext": "span",
},
map[string]any{
"name": "duration_nano",
"fieldContext": "span",
},
},
"order": []any{
map[string]any{
"key": map[string]any{"name": "timestamp", "fieldContext": "span"},
"direction": "desc",
},
},
"limit": 100,
},
},
},
},
},
// 8. scalar + traces builder: total span count as a single value
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{
"expression": "count()",
"alias": "span_count",
},
},
"filter": map[string]any{
"expression": "service.name = 'frontend'",
},
},
},
},
},
},
// 9. scalar + logs builder: total error log count as a single value
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{
"expression": "count()",
"alias": "error_count",
},
},
"filter": map[string]any{
"expression": "severity_text = 'ERROR'",
},
},
},
},
},
},
// 10. scalar + metrics builder: single reduced value with reduceTo
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{
"metricName": "http.server.duration.count",
"timeAggregation": "rate",
"spaceAggregation": "sum",
"reduceTo": "sum",
},
},
"stepInterval": "60s",
},
},
},
},
},
// 11. builder formula: error rate from two trace queries
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{
"expression": "countIf(has_error = true)",
},
},
"stepInterval": "60s",
"groupBy": []any{
map[string]any{
"name": "service.name",
"fieldContext": "resource",
},
},
},
},
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "B",
"signal": "traces",
"aggregations": []any{
map[string]any{
"expression": "count()",
},
},
"stepInterval": "60s",
"groupBy": []any{
map[string]any{
"name": "service.name",
"fieldContext": "resource",
},
},
},
},
map[string]any{
"type": "builder_formula",
"spec": map[string]any{
"name": "error_rate",
"expression": "A / B * 100",
},
},
},
},
},
// 12. PromQL query with UTF-8 dot metric name
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "promql",
"spec": map[string]any{
"name": "request_rate",
"query": "sum(rate({\"http.server.duration.count\"}[5m])) by (\"service.name\")",
"step": 60,
},
},
},
},
},
// 13. ClickHouse SQL — time_series
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "span_rate",
"query": "SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, count() AS value FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= $start_datetime AND timestamp <= $end_datetime GROUP BY ts ORDER BY ts",
},
},
},
},
},
// 14. ClickHouse SQL — raw
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "recent_errors",
"query": "SELECT timestamp, body FROM signoz_logs.distributed_logs_v2 WHERE timestamp >= $start_timestamp_nano AND timestamp <= $end_timestamp_nano AND severity_text = 'ERROR' ORDER BY timestamp DESC LIMIT 100",
},
},
},
},
},
// 15. ClickHouse SQL — scalar
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "total_spans",
"query": "SELECT count() AS value FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= $start_datetime AND timestamp <= $end_datetime",
},
},
},
},
},
)
return nil
}
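// Illustrative only (assumed usage, not part of this change): assuming the spec is
// served for POST /api/v5/query_range, any of the examples above can be sent
// verbatim as the request body, e.g.
//
//	req, _ := http.NewRequest(http.MethodPost, baseURL+"/api/v5/query_range", bytes.NewReader(body))
//	req.Header.Set("Content-Type", "application/json")
//	req.Header.Set("SIGNOZ-API-KEY", apiKey) // header name assumed
//	resp, err := http.DefaultClient.Do(req)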
var _ jsonschema.Preparer = &QueryRangeResponse{}
// PrepareJSONSchema adds description to the QueryRangeResponse schema.
func (q *QueryRangeResponse) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Response from the v5 query range endpoint. The data.results array contains typed results depending on the requestType: TimeSeriesData for time_series, ScalarData for scalar, or RawData for raw requests.")
return nil
}
var _ jsonschema.Preparer = &CompositeQuery{}
// PrepareJSONSchema adds description to the CompositeQuery schema.
func (c *CompositeQuery) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Composite query containing one or more query envelopes. Each query envelope specifies its type and corresponding spec.")
return nil
}
var _ jsonschema.Preparer = &ExecStats{}
// PrepareJSONSchema adds description to the ExecStats schema.
func (e *ExecStats) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Execution statistics for the query, including rows scanned, bytes scanned, and duration.")
return nil
}
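// Illustrative only (not part of this change): the jsonschema.Enum,
// jsonschema.Exposer, jsonschema.OneOfExposer, and jsonschema.Preparer hooks
// defined in this file are picked up automatically when the types are reflected,
// e.g.
//
//	reflector := jsonschema.Reflector{}
//	schema, err := reflector.Reflect(QueryRangeRequest{})
//	// on success, schema carries the enums, oneOf variants, examples, and descriptions above.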

View File

@@ -266,3 +266,110 @@ type FieldValueSelector struct {
Value string `json:"value"`
Limit int `json:"limit"`
}
type GettableFieldKeys struct {
Keys map[string][]*TelemetryFieldKey `json:"keys"`
Complete bool `json:"complete"`
}
type PostableFieldKeysParams struct {
Signal Signal `query:"signal"`
Source Source `query:"source"`
Limit int `query:"limit"`
StartUnixMilli int64 `query:"startUnixMilli"`
EndUnixMilli int64 `query:"endUnixMilli"`
FieldContext FieldContext `query:"fieldContext"`
FieldDataType FieldDataType `query:"fieldDataType"`
MetricName string `query:"metricName"`
SearchText string `query:"searchText"`
}
type GettableFieldValues struct {
Values *TelemetryFieldValues `json:"values"`
Complete bool `json:"complete"`
}
type PostableFieldValueParams struct {
PostableFieldKeysParams
Name string `query:"name"`
ExistingQuery string `query:"existingQuery"`
}
func NewFieldKeySelectorFromPostableFieldKeysParams(params PostableFieldKeysParams) *FieldKeySelector {
var req FieldKeySelector
if params.StartUnixMilli != 0 {
req.StartUnixMilli = params.StartUnixMilli
// Round down to the nearest 6 hours (21600000 milliseconds)
req.StartUnixMilli -= req.StartUnixMilli % 21600000
}
if params.EndUnixMilli != 0 {
req.EndUnixMilli = params.EndUnixMilli
}
req.Signal = params.Signal
req.Source = params.Source
req.FieldContext = params.FieldContext
req.FieldDataType = params.FieldDataType
req.SelectorMatchType = FieldSelectorMatchTypeFuzzy
if params.Limit != 0 {
req.Limit = params.Limit
} else {
req.Limit = 1000
}
if params.MetricName != "" {
req.MetricContext = &MetricContext{
MetricName: params.MetricName,
}
}
req.Name = params.SearchText
if params.SearchText != "" && params.FieldContext == FieldContextUnspecified {
parsedFieldKey := GetFieldKeyFromKeyText(params.SearchText)
if parsedFieldKey.FieldContext != FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, req.Signal) {
req.Name = parsedFieldKey.Name
req.FieldContext = parsedFieldKey.FieldContext
}
}
}
return &req
}
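// Illustrative only (not part of this change): the rounding above aligns the start
// time to a 6-hour boundary; e.g. startUnixMilli=1640998800000 becomes
// 1640995200000, since 1640998800000 % 21600000 == 3600000.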
func NewFieldValueSelectorFromPostableFieldValueParams(params PostableFieldValueParams) *FieldValueSelector {
keySelector := NewFieldKeySelectorFromPostableFieldKeysParams(params.PostableFieldKeysParams)
fieldValueSelector := &FieldValueSelector{
FieldKeySelector: keySelector,
}
fieldValueSelector.Name = params.Name
if params.Name != "" && fieldValueSelector.FieldContext == FieldContextUnspecified {
parsedFieldKey := GetFieldKeyFromKeyText(params.Name)
if parsedFieldKey.FieldContext != FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, fieldValueSelector.Signal) {
fieldValueSelector.Name = parsedFieldKey.Name
fieldValueSelector.FieldContext = parsedFieldKey.FieldContext
}
}
}
fieldValueSelector.ExistingQuery = params.ExistingQuery
fieldValueSelector.Value = params.SearchText
if params.Limit != 0 {
fieldValueSelector.Limit = params.Limit
} else {
fieldValueSelector.Limit = 50
}
return fieldValueSelector
}

View File

@@ -154,3 +154,21 @@ func (f FieldContext) TagType() string {
}
return ""
}
func isContextValidForSignal(ctx FieldContext, signal Signal) bool {
if ctx == FieldContextResource ||
ctx == FieldContextAttribute ||
ctx == FieldContextScope {
return true
}
switch signal.StringValue() {
case SignalLogs.StringValue():
return ctx == FieldContextLog || ctx == FieldContextBody
case SignalTraces.StringValue():
return ctx == FieldContextSpan || ctx == FieldContextEvent || ctx == FieldContextTrace
case SignalMetrics.StringValue():
return ctx == FieldContextMetric
}
return true
}
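// Illustrative only (not part of this change): with this rule, a key whose
// inferred context is span is accepted when the signal is traces but rejected for
// logs, in which case the callers above keep the raw search text instead of the
// parsed name and context.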

View File

@@ -0,0 +1,48 @@
package telemetrytypes
// Enum returns the acceptable values for Signal.
func (Signal) Enum() []any {
return []any{
SignalTraces,
SignalLogs,
SignalMetrics,
}
}
// Enum returns the acceptable values for FieldContext.
func (FieldContext) Enum() []any {
return []any{
FieldContextMetric,
FieldContextLog,
FieldContextSpan,
// FieldContextTrace,
FieldContextResource,
// FieldContextScope,
FieldContextAttribute,
// FieldContextEvent,
FieldContextBody,
}
}
// Enum returns the acceptable values for Source.
func (Source) Enum() []any {
return []any{
SourceMeter,
}
}
// Enum returns the acceptable values for FieldDataType.
func (FieldDataType) Enum() []any {
return []any{
FieldDataTypeString,
FieldDataTypeBool,
FieldDataTypeFloat64,
FieldDataTypeInt64,
FieldDataTypeNumber,
// FieldDataTypeArrayString,
// FieldDataTypeArrayFloat64,
// FieldDataTypeArrayBool,
// FieldDataTypeArrayInt64,
// FieldDataTypeArrayNumber,
}
}

View File

@@ -107,3 +107,13 @@ func (enum *Email) UnmarshalText(text []byte) error {
func (enum Email) MarshalText() (text []byte, err error) {
return []byte(enum.StringValue()), nil
}
func (enum *Email) UnmarshalParam(param string) error {
email, err := NewEmail(param)
if err != nil {
return err
}
*enum = email
return nil
}

View File

@@ -77,3 +77,10 @@ func (enum *String) UnmarshalText(text []byte) error {
func (enum String) MarshalText() (text []byte, err error) {
return []byte(enum.StringValue()), nil
}
// Implement Gin's BindUnmarshaler interface.
// See the description of https://github.com/SigNoz/signoz/pull/10219 for additional details.
func (enum *String) UnmarshalParam(param string) error {
*enum = NewString(param)
return nil
}

View File

@@ -140,3 +140,13 @@ func (enum *UUID) UnmarshalText(text []byte) error {
func (enum UUID) MarshalText() (text []byte, err error) {
return []byte(enum.StringValue()), nil
}
func (enum *UUID) UnmarshalParam(param string) error {
uuid, err := NewUUID(param)
if err != nil {
return err
}
*enum = uuid
return nil
}

View File

@@ -8,6 +8,7 @@ import (
"fmt"
"github.com/SigNoz/signoz/pkg/errors"
ginbinding "github.com/gin-gonic/gin/binding"
)
var (
@@ -42,4 +43,7 @@ type Valuer interface {
// Implement encoding.TextMarshaler to allow the value to be marshalled into a string
encoding.TextMarshaler
// Implement Gin's BindUnmarshaler interface
ginbinding.BindUnmarshaler
}
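// Illustrative only (assumed usage, not part of this change): with BindUnmarshaler
// implemented, Gin can bind query parameters directly into valuer-backed fields,
// e.g. with a hypothetical handler:
//
//	type listParams struct {
//		OrgID valuer.UUID `form:"orgId"`
//	}
//
//	func list(c *gin.Context) {
//		var p listParams
//		if err := c.ShouldBindQuery(&p); err != nil {
//			c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
//			return
//		}
//		// p.OrgID was populated via UnmarshalParam.
//	}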