Mirror of https://github.com/SigNoz/signoz.git (synced 2026-02-18 23:12:36 +00:00)

Compare commits: 2 commits, fix/toolti... to tvats-expo

| Author | SHA1 | Date |
|---|---|---|
|  | 5731a3870b |  |
|  | 4843a3f292 |  |

docs/api/openapi.yml: 3667 lines changed. File diff suppressed because it is too large.
@@ -10,7 +10,6 @@ import (
    "github.com/SigNoz/signoz/ee/query-service/usage"
    "github.com/SigNoz/signoz/pkg/alertmanager"
    "github.com/SigNoz/signoz/pkg/global"
    "github.com/SigNoz/signoz/pkg/http/handler"
    "github.com/SigNoz/signoz/pkg/http/middleware"
    querierAPI "github.com/SigNoz/signoz/pkg/querier"
    baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
@@ -107,10 +106,7 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
    router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)

    // v5
    router.Handle("/api/v5/query_range", handler.New(
        am.ViewAccess(ah.queryRangeV5),
        querierAPI.QueryRangeV5OpenAPIDef,
    )).Methods(http.MethodPost)
    router.HandleFunc("/api/v5/query_range", am.ViewAccess(ah.queryRangeV5)).Methods(http.MethodPost)

    router.HandleFunc("/api/v5/substitute_vars", am.ViewAccess(ah.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)
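For orientation, here is a minimal, self-contained sketch of how a POST-only query-range route like the one above can be mounted on a gorilla/mux router behind an access-check wrapper. The middleware and handler below are stand-ins for illustration, not the actual SigNoz APIHandler or AuthZ types.

```go
package main

import (
	"net/http"

	"github.com/gorilla/mux"
)

// viewAccess is a stand-in for the ViewAccess authorization middleware used above.
func viewAccess(next http.HandlerFunc) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		// authorization checks would go here
		next(w, r)
	}
}

// queryRangeV5 is a stand-in for the real query-range handler.
func queryRangeV5(w http.ResponseWriter, r *http.Request) {
	w.WriteHeader(http.StatusOK)
}

func main() {
	router := mux.NewRouter()
	// Register the v5 endpoint for POST only, mirroring the HandleFunc form above.
	router.HandleFunc("/api/v5/query_range", viewAccess(queryRangeV5)).Methods(http.MethodPost)
	_ = http.ListenAndServe(":8080", router)
}
```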
@@ -1,107 +0,0 @@
|
||||
/**
|
||||
* ! Do not edit manually
|
||||
* * The file has been auto-generated using Orval for SigNoz
|
||||
* * regenerate with 'yarn generate:api'
|
||||
* SigNoz
|
||||
*/
|
||||
import type {
|
||||
MutationFunction,
|
||||
UseMutationOptions,
|
||||
UseMutationResult,
|
||||
} from 'react-query';
|
||||
import { useMutation } from 'react-query';
|
||||
|
||||
import { GeneratedAPIInstance } from '../../../index';
|
||||
import type {
|
||||
Querybuildertypesv5QueryRangeRequestDTO,
|
||||
QueryRangeV5200,
|
||||
RenderErrorResponseDTO,
|
||||
} from '../sigNoz.schemas';
|
||||
|
||||
type AwaitedInput<T> = PromiseLike<T> | T;
|
||||
|
||||
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
|
||||
|
||||
/**
|
||||
* Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.
|
||||
* @summary Query range
|
||||
*/
|
||||
export const queryRangeV5 = (
|
||||
querybuildertypesv5QueryRangeRequestDTO: Querybuildertypesv5QueryRangeRequestDTO,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<QueryRangeV5200>({
|
||||
url: `/api/v5/query_range`,
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
data: querybuildertypesv5QueryRangeRequestDTO,
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getQueryRangeV5MutationOptions = <
|
||||
TError = RenderErrorResponseDTO,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof queryRangeV5>>,
|
||||
TError,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof queryRangeV5>>,
|
||||
TError,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['queryRangeV5'];
|
||||
const { mutation: mutationOptions } = options
|
||||
? options.mutation &&
|
||||
'mutationKey' in options.mutation &&
|
||||
options.mutation.mutationKey
|
||||
? options
|
||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
||||
: { mutation: { mutationKey } };
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof queryRangeV5>>,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO }
|
||||
> = (props) => {
|
||||
const { data } = props ?? {};
|
||||
|
||||
return queryRangeV5(data);
|
||||
};
|
||||
|
||||
return { mutationFn, ...mutationOptions };
|
||||
};
|
||||
|
||||
export type QueryRangeV5MutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof queryRangeV5>>
|
||||
>;
|
||||
export type QueryRangeV5MutationBody = Querybuildertypesv5QueryRangeRequestDTO;
|
||||
export type QueryRangeV5MutationError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
* @summary Query range
|
||||
*/
|
||||
export const useQueryRangeV5 = <
|
||||
TError = RenderErrorResponseDTO,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof queryRangeV5>>,
|
||||
TError,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof queryRangeV5>>,
|
||||
TError,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getQueryRangeV5MutationOptions(options);
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
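The generated mutation above boils down to a JSON POST against /api/v5/query_range. Below is a hedged sketch of an equivalent direct client call in Go; the payload fields are illustrative placeholders, not the real Querybuildertypesv5QueryRangeRequestDTO schema.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Illustrative payload only; the real body follows the query-range request schema.
	payload := map[string]any{
		"start": int64(1693612800000000000),
		"end":   int64(1693699199000000000),
	}
	body, _ := json.Marshal(payload)

	req, err := http.NewRequest(http.MethodPost, "http://localhost:8080/api/v5/query_range", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
```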
File diff suppressed because it is too large.
@@ -67,6 +67,12 @@ export function prepareBarPanelConfig({
        maxTimeScale,
    });

    builder.setCursor({
        focus: {
            prox: 1e3,
        },
    });

    if (widget.stackedBarChart) {
        const seriesCount = (apiResponse?.data?.result?.length ?? 0) + 1; // +1 for 1-based uPlot series indices
        builder.setBands(getInitialStackedBands(seriesCount));
@@ -67,9 +67,7 @@ export const prepareUPlotConfig = ({
        maxTimeScale,
    });

    const seriesList = apiResponse.data?.result || [];

    seriesList.forEach((series) => {
    apiResponse.data?.result?.forEach((series) => {
        const baseLabelName = getLabelName(
            series.metric,
            series.queryName || '', // query
@@ -7,10 +7,11 @@ import { merge } from 'lodash-es';
import noop from 'lodash-es/noop';
import uPlot, { Cursor, Hooks, Options } from 'uplot';

import { DEFAULT_CURSOR_CONFIG, DEFAULT_PLOT_CONFIG } from '../constants';
import {
    ConfigBuilder,
    ConfigBuilderProps,
    DEFAULT_CURSOR_CONFIG,
    DEFAULT_PLOT_CONFIG,
    LegendItem,
    SelectionPreferencesSource,
} from './types';
@@ -1,7 +1,6 @@
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { themeColors } from 'constants/theme';
|
||||
import { generateColor } from 'lib/uPlotLib/utils/generateColor';
|
||||
import { calculateWidthBasedOnStepInterval } from 'lib/uPlotV2/utils';
|
||||
import uPlot, { Series } from 'uplot';
|
||||
|
||||
import {
|
||||
@@ -292,16 +291,21 @@ function getBarPathBuilder({
|
||||
idx1: number,
|
||||
): Series.Paths | null => {
|
||||
let effectiveBarMaxWidth = barMaxWidth;
|
||||
const widthBasedOnStepInterval = calculateWidthBasedOnStepInterval({
|
||||
uPlotInstance: self,
|
||||
stepInterval,
|
||||
});
|
||||
|
||||
if (widthBasedOnStepInterval > 0) {
|
||||
effectiveBarMaxWidth = Math.min(
|
||||
effectiveBarMaxWidth,
|
||||
widthBasedOnStepInterval,
|
||||
);
|
||||
const xScale = self.scales.x as uPlot.Scale | undefined;
|
||||
if (xScale && typeof xScale.min === 'number') {
|
||||
const start = xScale.min as number;
|
||||
const end = start + stepInterval;
|
||||
const startPx = self.valToPos(start, 'x');
|
||||
const endPx = self.valToPos(end, 'x');
|
||||
const intervalPx = Math.abs(endPx - startPx);
|
||||
|
||||
if (intervalPx > 0) {
|
||||
effectiveBarMaxWidth =
|
||||
typeof barMaxWidth === 'number'
|
||||
? Math.min(barMaxWidth, intervalPx)
|
||||
: intervalPx;
|
||||
}
|
||||
}
|
||||
|
||||
const barsCfgKey = `bars|${barAlignment}|${barWidthFactor}|${effectiveBarMaxWidth}`;
|
||||
|
||||
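The clamping logic above converts one step interval into pixels via the x scale and caps the configured maximum bar width at that value. A small Go sketch of just that rule follows, with valToPos standing in for uPlot's value-to-pixel conversion; it is an illustration only, not part of the codebase.

```go
package main

import "fmt"

// effectiveBarWidth caps barMaxWidth at the pixel width of one step interval.
// valToPos stands in for uPlot's u.valToPos(value, "x") conversion.
func effectiveBarWidth(barMaxWidth, xMin, stepInterval float64, valToPos func(float64) float64) float64 {
	startPx := valToPos(xMin)
	endPx := valToPos(xMin + stepInterval)
	intervalPx := endPx - startPx
	if intervalPx < 0 {
		intervalPx = -intervalPx
	}
	if intervalPx > 0 && intervalPx < barMaxWidth {
		return intervalPx
	}
	return barMaxWidth
}

func main() {
	// A 60s step rendered at roughly 0.5px per second caps 100px bars at 30px.
	toPx := func(v float64) float64 { return v * 0.5 }
	fmt.Println(effectiveBarWidth(100, 0, 60, toPx))
}
```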
@@ -1,6 +1,6 @@
|
||||
import { PrecisionOption } from 'components/Graph/types';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import uPlot, { Series } from 'uplot';
|
||||
import uPlot, { Cursor, Options, Series } from 'uplot';
|
||||
|
||||
import { ThresholdsDrawHookOptions } from '../hooks/types';
|
||||
|
||||
@@ -186,3 +186,47 @@ export interface LegendItem {
|
||||
color: uPlot.Series['stroke'];
|
||||
show: boolean;
|
||||
}
|
||||
|
||||
export const DEFAULT_PLOT_CONFIG: Partial<Options> = {
|
||||
focus: {
|
||||
alpha: 0.3,
|
||||
},
|
||||
cursor: {
|
||||
focus: {
|
||||
prox: 30,
|
||||
},
|
||||
},
|
||||
legend: {
|
||||
show: false,
|
||||
},
|
||||
padding: [16, 16, 8, 8],
|
||||
series: [],
|
||||
hooks: {},
|
||||
};
|
||||
|
||||
const POINTS_FILL_COLOR = '#FFFFFF';
|
||||
|
||||
export const DEFAULT_CURSOR_CONFIG: Cursor = {
|
||||
drag: { setScale: true },
|
||||
points: {
|
||||
one: true,
|
||||
size: (u, seriesIdx) => (u.series[seriesIdx]?.points?.size ?? 0) * 3,
|
||||
width: (_u, _seriesIdx, size) => size / 4,
|
||||
stroke: (u, seriesIdx): string => {
|
||||
const points = u.series[seriesIdx]?.points;
|
||||
const strokeFn =
|
||||
typeof points?.stroke === 'function' ? points.stroke : undefined;
|
||||
const strokeValue =
|
||||
strokeFn !== undefined
|
||||
? strokeFn(u, seriesIdx)
|
||||
: typeof points?.stroke === 'string'
|
||||
? points.stroke
|
||||
: '';
|
||||
return `${strokeValue}90`;
|
||||
},
|
||||
fill: (): string => POINTS_FILL_COLOR,
|
||||
},
|
||||
focus: {
|
||||
prox: 30,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -1,47 +0,0 @@
|
||||
import { Cursor, Options } from 'uplot';
|
||||
|
||||
const POINTS_FILL_COLOR = '#FFFFFF';
|
||||
|
||||
export const DEFAULT_HOVER_PROX_VALUE = 30; // only snap if within 30px horizontally
|
||||
export const DEFAULT_FOCUS_PROX_VALUE = 1e6;
|
||||
|
||||
export const DEFAULT_PLOT_CONFIG: Partial<Options> = {
|
||||
focus: {
|
||||
alpha: 0.3,
|
||||
},
|
||||
legend: {
|
||||
show: false,
|
||||
},
|
||||
padding: [16, 16, 8, 8],
|
||||
series: [],
|
||||
hooks: {},
|
||||
};
|
||||
|
||||
export const DEFAULT_CURSOR_CONFIG: Cursor = {
|
||||
drag: { setScale: true },
|
||||
points: {
|
||||
one: true,
|
||||
size: (u, seriesIdx) => (u.series[seriesIdx]?.points?.size ?? 0) * 3,
|
||||
width: (_u, _seriesIdx, size) => size / 4,
|
||||
stroke: (u, seriesIdx): string => {
|
||||
const points = u.series[seriesIdx]?.points;
|
||||
const strokeFn =
|
||||
typeof points?.stroke === 'function' ? points.stroke : undefined;
|
||||
const strokeValue =
|
||||
strokeFn !== undefined
|
||||
? strokeFn(u, seriesIdx)
|
||||
: typeof points?.stroke === 'string'
|
||||
? points.stroke
|
||||
: '';
|
||||
return `${strokeValue}90`;
|
||||
},
|
||||
fill: (): string => POINTS_FILL_COLOR,
|
||||
},
|
||||
focus: {
|
||||
prox: DEFAULT_FOCUS_PROX_VALUE,
|
||||
},
|
||||
hover: {
|
||||
prox: DEFAULT_HOVER_PROX_VALUE,
|
||||
bias: 0,
|
||||
},
|
||||
};
|
||||
@@ -87,7 +87,7 @@ export function shouldShowTooltipForSync(
export function shouldShowTooltipForInteraction(
    controller: TooltipControllerState,
): boolean {
    return controller.focusedSeriesIndex != null;
    return controller.focusedSeriesIndex != null || controller.isAnySeriesActive;
}

export function updateHoverState(
@@ -1,17 +0,0 @@
export function calculateWidthBasedOnStepInterval({
    uPlotInstance,
    stepInterval,
}: {
    uPlotInstance: uPlot;
    stepInterval: number;
}): number {
    const xScale = uPlotInstance.scales.x;
    if (xScale && typeof xScale.min === 'number') {
        const start = xScale.min as number;
        const end = start + stepInterval;
        const startPx = uPlotInstance.valToPos(start, 'x');
        const endPx = uPlotInstance.valToPos(end, 'x');
        return Math.abs(endPx - startPx);
    }
    return 0;
}
@@ -18,6 +18,7 @@ import (
    "github.com/SigNoz/signoz/pkg/modules/organization"
    "github.com/SigNoz/signoz/pkg/modules/preference"
    "github.com/SigNoz/signoz/pkg/modules/promote"
    "github.com/SigNoz/signoz/pkg/modules/rawdataexport"
    "github.com/SigNoz/signoz/pkg/modules/session"
    "github.com/SigNoz/signoz/pkg/modules/user"
    "github.com/SigNoz/signoz/pkg/types"
@@ -44,6 +45,7 @@ type provider struct {
    gatewayHandler gateway.Handler
    fieldsHandler fields.Handler
    authzHandler authz.Handler
    rawDataExportHandler rawdataexport.Handler
}

func NewFactory(
@@ -63,6 +65,7 @@ func NewFactory(
    gatewayHandler gateway.Handler,
    fieldsHandler fields.Handler,
    authzHandler authz.Handler,
    rawDataExportHandler rawdataexport.Handler,
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
    return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
        return newProvider(
@@ -85,6 +88,7 @@ func NewFactory(
            gatewayHandler,
            fieldsHandler,
            authzHandler,
            rawDataExportHandler,
        )
    })
}
@@ -109,6 +113,7 @@ func newProvider(
    gatewayHandler gateway.Handler,
    fieldsHandler fields.Handler,
    authzHandler authz.Handler,
    rawDataExportHandler rawdataexport.Handler,
) (apiserver.APIServer, error) {
    settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
    router := mux.NewRouter().UseEncodedPath()
@@ -131,6 +136,7 @@ func newProvider(
        gatewayHandler: gatewayHandler,
        fieldsHandler: fieldsHandler,
        authzHandler: authzHandler,
        rawDataExportHandler: rawDataExportHandler,
    }

    provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
@@ -199,6 +205,10 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
        return err
    }

    if err := provider.addRawDataExportRoutes(router); err != nil {
        return err
    }

    return nil
}
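The wiring above follows a simple pattern: the provider stores the new rawdataexport handler as a field, and AddToRouter mounts each route group, returning the first registration error. Below is a stripped-down sketch of that pattern with stand-in types, not the actual SigNoz factory or apiserver interfaces.

```go
package main

import (
	"net/http"

	"github.com/gorilla/mux"
)

// exportHandler stands in for rawdataexport.Handler.
type exportHandler interface {
	ExportRawData(http.ResponseWriter, *http.Request)
}

type provider struct {
	rawDataExportHandler exportHandler
}

// AddToRouter mounts each route group and returns the first registration error.
func (p *provider) AddToRouter(router *mux.Router) error {
	if err := p.addRawDataExportRoutes(router); err != nil {
		return err
	}
	return nil
}

func (p *provider) addRawDataExportRoutes(router *mux.Router) error {
	return router.HandleFunc("/api/v1/export_raw_data", p.rawDataExportHandler.ExportRawData).
		Methods(http.MethodGet, http.MethodPost).GetError()
}

type noopHandler struct{}

func (noopHandler) ExportRawData(w http.ResponseWriter, r *http.Request) {}

func main() {
	router := mux.NewRouter()
	p := &provider{rawDataExportHandler: noopHandler{}}
	if err := p.AddToRouter(router); err != nil {
		panic(err)
	}
}
```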
pkg/apiserver/signozapiserver/rawDataExport.go (new file, 46 lines)
@@ -0,0 +1,46 @@
package signozapiserver

import (
    "net/http"

    "github.com/SigNoz/signoz/pkg/http/handler"
    "github.com/SigNoz/signoz/pkg/types"
    v5 "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
    "github.com/gorilla/mux"
)

func (provider *provider) addRawDataExportRoutes(router *mux.Router) error {
    if err := router.Handle("/api/v1/export_raw_data", handler.New(provider.authZ.ViewAccess(provider.rawDataExportHandler.ExportRawData), handler.OpenAPIDef{
        ID: "HandleExportRawDataGET",
        Tags: []string{"logs", "traces"},
        Summary: "Export raw data",
        Description: "This endpoint allows a simple query for exporting raw data for traces and logs",
        Request: new(types.ExportRawDataQueryParams),
        RequestContentType: "application/json",
        Response: nil,
        ResponseContentType: "application/json",
        SuccessStatusCode: http.StatusOK,
        ErrorStatusCodes: []int{http.StatusBadRequest},
    })).Methods(http.MethodGet).GetError(); err != nil {
        return err
    }
    if err := router.Handle("/api/v1/export_raw_data", handler.New(provider.authZ.ViewAccess(provider.rawDataExportHandler.ExportRawData), handler.OpenAPIDef{
        ID: "HandleExportRawDataPOST",
        Tags: []string{"logs", "traces"},
        Summary: "Export raw data",
        Description: "This endpoint allows a complex query for exporting raw data for traces and logs",
        Request: new(struct {
            v5.QueryRangeRequest
            types.ExportRawDataFormatQueryParam
        }),
        RequestContentType: "application/json",
        Response: nil,
        ResponseContentType: "application/json",
        SuccessStatusCode: http.StatusOK,
        ErrorStatusCodes: []int{http.StatusBadRequest},
    })).Methods(http.MethodPost).GetError(); err != nil {
        return err
    }

    return nil
}
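For reference, here is a hedged example of invoking the GET form of this endpoint from a Go client. The query parameters (start, end, format, limit) mirror the handler documentation further below; the host and token are placeholders.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
	"os"
)

func main() {
	params := url.Values{}
	params.Set("start", "1693612800000000000") // Unix timestamp in nanoseconds
	params.Set("end", "1693699199000000000")
	params.Set("format", "csv")
	params.Set("limit", "1000")

	req, err := http.NewRequest(http.MethodGet, "http://localhost:8080/api/v1/export_raw_data?"+params.Encode(), nil)
	if err != nil {
		panic(err)
	}
	// Placeholder auth header; the route sits behind ViewAccess authorization.
	req.Header.Set("Authorization", "Bearer <token>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Stream the CSV straight to a file.
	f, err := os.Create("export.csv")
	if err != nil {
		panic(err)
	}
	defer f.Close()
	if _, err := io.Copy(f, resp.Body); err != nil {
		panic(err)
	}
	// Trailers are available only after the body has been fully read.
	fmt.Println("X-Response-Complete:", resp.Trailer.Get("X-Response-Complete"))
}
```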
@@ -7,7 +7,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/swaggest/openapi-go"
|
||||
"github.com/swaggest/openapi-go/openapi3"
|
||||
)
|
||||
|
||||
type ServeOpenAPIFunc func(openapi.OperationContext)
|
||||
@@ -60,39 +59,7 @@ func (handler *handler) ServeOpenAPI(opCtx openapi.OperationContext) {
|
||||
}
|
||||
|
||||
// Add request structure
|
||||
reqOpts := []openapi.ContentOption{openapi.WithContentType(handler.openAPIDef.RequestContentType)}
|
||||
if len(handler.openAPIDef.RequestExamples) > 0 {
|
||||
reqOpts = append(reqOpts, openapi.WithCustomize(func(cor openapi.ContentOrReference) {
|
||||
rbOrRef, ok := cor.(*openapi3.RequestBodyOrRef)
|
||||
if !ok || rbOrRef.RequestBody == nil {
|
||||
return
|
||||
}
|
||||
ct := handler.openAPIDef.RequestContentType
|
||||
if ct == "" {
|
||||
ct = "application/json"
|
||||
}
|
||||
mt, exists := rbOrRef.RequestBody.Content[ct]
|
||||
if !exists {
|
||||
return
|
||||
}
|
||||
if mt.Examples == nil {
|
||||
mt.Examples = make(map[string]openapi3.ExampleOrRef)
|
||||
}
|
||||
for _, ex := range handler.openAPIDef.RequestExamples {
|
||||
val := ex.Value
|
||||
oaExample := openapi3.Example{Value: &val}
|
||||
if ex.Summary != "" {
|
||||
oaExample.WithSummary(ex.Summary)
|
||||
}
|
||||
if ex.Description != "" {
|
||||
oaExample.WithDescription(ex.Description)
|
||||
}
|
||||
mt.Examples[ex.Name] = openapi3.ExampleOrRef{Example: &oaExample}
|
||||
}
|
||||
rbOrRef.RequestBody.Content[ct] = mt
|
||||
}))
|
||||
}
|
||||
opCtx.AddReqStructure(handler.openAPIDef.Request, reqOpts...)
|
||||
opCtx.AddReqStructure(handler.openAPIDef.Request, openapi.WithContentType(handler.openAPIDef.RequestContentType))
|
||||
|
||||
// Add request query structure
|
||||
opCtx.AddReqStructure(handler.openAPIDef.RequestQuery)
|
||||
|
||||
@@ -9,14 +9,6 @@ import (
    "github.com/swaggest/rest/openapi"
)

// OpenAPIExample is a named example for an OpenAPI operation.
type OpenAPIExample struct {
    Name string
    Summary string
    Description string
    Value any
}

// Def is the definition of an OpenAPI operation
type OpenAPIDef struct {
    ID string
@@ -26,7 +18,6 @@ type OpenAPIDef struct {
    Request any
    RequestQuery any
    RequestContentType string
    RequestExamples []OpenAPIExample
    Response any
    ResponseContentType string
    SuccessStatusCode int
@@ -6,17 +6,18 @@ import (
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/binding"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrylogs"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
@@ -34,31 +35,27 @@ func NewHandler(module rawdataexport.Module) rawdataexport.Handler {
|
||||
// ExportRawData handles data export requests.
|
||||
//
|
||||
// API Documentation:
|
||||
// Endpoint: GET /api/v1/export_raw_data
|
||||
// Endpoint: GET /api/v1/export_raw_data - logs and traces only (simple queries via query params)
|
||||
// Endpoint: POST /api/v1/export_raw_data - logs, traces, and trace operator (full QueryRangeRequest in JSON body)
|
||||
//
|
||||
// Query Parameters:
|
||||
// GET: Converts query params to QueryRangeRequest and delegates to common export logic. No composite_query/trace operator.
|
||||
//
|
||||
// - source (optional): Type of data to export ["logs" (default), "metrics", "traces"]
|
||||
// Note: Currently only "logs" is fully supported
|
||||
// GET Query Parameters:
|
||||
//
|
||||
// - source (optional): ["logs" (default) or "traces"] - metrics not supported
|
||||
// - format (optional): Output format ["csv" (default), "jsonl"]
|
||||
// - start (required): Start time (Unix timestamp in nanoseconds)
|
||||
// - end (required): End time (Unix timestamp in nanoseconds)
|
||||
// - limit (optional): Max rows to export (cannot exceed MAX_EXPORT_ROW_COUNT_LIMIT)
|
||||
// - filter (optional): Filter expression
|
||||
// - columns (optional): Columns to include
|
||||
// - order_by (optional): Sorting ["column:direction"]
|
||||
//
|
||||
// - start (required): Start time for query (Unix timestamp in nanoseconds)
|
||||
// POST Request Body (QueryRangeRequest):
|
||||
//
|
||||
// - end (required): End time for query (Unix timestamp in nanoseconds)
|
||||
//
|
||||
// - limit (optional): Maximum number of rows to export
|
||||
// Constraints: Must be positive and cannot exceed MAX_EXPORT_ROW_COUNT_LIMIT
|
||||
//
|
||||
// - filter (optional): Filter expression to apply to the query
|
||||
//
|
||||
// - columns (optional): Specific columns to include in export
|
||||
// Default: all columns are returned
|
||||
// Format: ["context.field:type", "context.field", "field"]
|
||||
//
|
||||
// - order_by (optional): Sorting specification ["column:direction" or "context.field:type:direction"]
|
||||
// Direction: "asc" or "desc"
|
||||
// Default: ["timestamp:desc", "id:desc"]
|
||||
// - Accepts a full QueryRangeRequest JSON body with composite_query containing QueryEnvelope array
|
||||
// - Supports builder_query and builder_trace_operator types
|
||||
// - format query param still controls output format
|
||||
//
|
||||
// Response Headers:
|
||||
// - Content-Type: "text/csv" or "application/x-ndjson"
|
||||
@@ -86,75 +83,45 @@ func NewHandler(module rawdataexport.Module) rawdataexport.Handler {
|
||||
// Export with filter and ordering:
|
||||
// GET /api/v1/export_raw_data?start=1693612800000000000&end=1693699199000000000
|
||||
// &filter=severity="error"&order_by=timestamp:desc&limit=1000
|
||||
//
|
||||
// Export with composite query (POST only):
|
||||
// POST /api/v1/export_raw_data?format=csv
|
||||
// Body: {"start":1693612800000000000,"end":1693699199000000000,"composite_query":{"queries":[...]}}
|
||||
func (handler *handler) ExportRawData(rw http.ResponseWriter, r *http.Request) {
|
||||
source, err := getExportQuerySource(r.URL.Query())
|
||||
if err != nil {
|
||||
var queryRangeRequest qbtypes.QueryRangeRequest
|
||||
var format string
|
||||
|
||||
if r.Method == http.MethodGet {
|
||||
var params types.ExportRawDataQueryParams
|
||||
if err := binding.Query.BindQuery(r.URL.Query(), ¶ms); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
format = params.Format
|
||||
var err error
|
||||
queryRangeRequest, err = buildQueryRangeRequest(¶ms)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
} else {
|
||||
var formatParam types.ExportRawDataFormatQueryParam
|
||||
if err := binding.Query.BindQuery(r.URL.Query(), &formatParam); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
format = formatParam.Format
|
||||
if err := json.NewDecoder(r.Body).Decode(&queryRangeRequest); err != nil {
|
||||
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid request body: %v", err))
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if err := validateAndApplyExportLimits(&queryRangeRequest); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
switch source {
|
||||
case "logs":
|
||||
handler.exportLogs(rw, r)
|
||||
case "traces":
|
||||
handler.exportTraces(rw, r)
|
||||
case "metrics":
|
||||
handler.exportMetrics(rw, r)
|
||||
default:
|
||||
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid source: must be logs"))
|
||||
}
|
||||
}
|
||||
|
||||
func (handler *handler) exportMetrics(rw http.ResponseWriter, r *http.Request) {
|
||||
render.Error(rw, errors.Newf(errors.TypeUnsupported, errors.CodeUnsupported, "metrics export is not yet supported"))
|
||||
}
|
||||
|
||||
func (handler *handler) exportTraces(rw http.ResponseWriter, r *http.Request) {
|
||||
render.Error(rw, errors.Newf(errors.TypeUnsupported, errors.CodeUnsupported, "traces export is not yet supported"))
|
||||
}
|
||||
|
||||
func (handler *handler) exportLogs(rw http.ResponseWriter, r *http.Request) {
|
||||
// Set up response headers
|
||||
rw.Header().Set("Cache-Control", "no-cache")
|
||||
rw.Header().Set("Vary", "Accept-Encoding") // Indicate that response varies based on Accept-Encoding
|
||||
rw.Header().Set("Access-Control-Expose-Headers", "Content-Disposition, X-Response-Complete")
|
||||
rw.Header().Set("Trailer", "X-Response-Complete")
|
||||
rw.Header().Set("Transfer-Encoding", "chunked")
|
||||
|
||||
queryParams := r.URL.Query()
|
||||
|
||||
startTime, endTime, err := getExportQueryTimeRange(queryParams)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
limit, err := getExportQueryLimit(queryParams)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
format, err := getExportQueryFormat(queryParams)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
// Set appropriate content type and filename
|
||||
filename := fmt.Sprintf("data_exported_%s.%s", time.Now().Format("2006-01-02_150405"), format)
|
||||
rw.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filename))
|
||||
|
||||
filterExpression := queryParams.Get("filter")
|
||||
|
||||
orderByExpression, err := getExportQueryOrderBy(queryParams)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
columns := getExportQueryColumns(queryParams)
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
@@ -167,70 +134,145 @@ func (handler *handler) exportLogs(rw http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
queryRangeRequest := qbtypes.QueryRangeRequest{
|
||||
Start: startTime,
|
||||
End: endTime,
|
||||
RequestType: qbtypes.RequestTypeRaw,
|
||||
CompositeQuery: qbtypes.CompositeQuery{
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: nil,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
setExportResponseHeaders(rw, format)
|
||||
|
||||
spec := qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Name: "raw",
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: filterExpression,
|
||||
},
|
||||
Limit: limit,
|
||||
Order: orderByExpression,
|
||||
}
|
||||
|
||||
spec.SelectFields = columns
|
||||
|
||||
queryRangeRequest.CompositeQuery.Queries[0].Spec = spec
|
||||
|
||||
// This will signal Export module to stop sending data
|
||||
doneChan := make(chan any)
|
||||
defer close(doneChan)
|
||||
rowChan, errChan := handler.module.ExportRawData(r.Context(), orgID, &queryRangeRequest, doneChan)
|
||||
|
||||
var isComplete bool
|
||||
isComplete, err := handler.executeExport(rowChan, errChan, format, rw)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
rw.Header().Set("X-Response-Complete", strconv.FormatBool(isComplete))
|
||||
}
|
||||
|
||||
// validateAndApplyExportLimits validates query types and applies default/max limits to all queries.
|
||||
func validateAndApplyExportLimits(req *qbtypes.QueryRangeRequest) error {
|
||||
isTraceOperatorQueryPresent := false
|
||||
|
||||
// Check if the trace operator query is present
|
||||
queries := req.CompositeQuery.Queries
|
||||
for idx := range len(queries) {
|
||||
if _, ok := queries[idx].Spec.(qbtypes.QueryBuilderTraceOperator); ok {
|
||||
isTraceOperatorQueryPresent = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// If the trace operator query is not present, and there are multiple queries, return an error
|
||||
if !isTraceOperatorQueryPresent && len(queries) > 1 {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "multiple queries not allowed without a trace operator query")
|
||||
}
|
||||
|
||||
for idx := range queries {
|
||||
switch spec := queries[idx].Spec.(type) {
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
|
||||
qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation],
|
||||
qbtypes.QueryBuilderTraceOperator:
|
||||
limit := queries[idx].GetLimit()
|
||||
if limit == 0 {
|
||||
limit = DefaultExportRowCountLimit
|
||||
} else if limit < 0 {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "limit must be positive")
|
||||
} else if limit > MaxExportRowCountLimit {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "limit cannot be more than %d", MaxExportRowCountLimit)
|
||||
}
|
||||
queries[idx].SetLimit(limit)
|
||||
default:
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported query type: %T", spec)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
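The per-query limit policy above is: 0 means "apply the default", negative values are rejected, and values above the maximum are rejected. Here is a standalone sketch of that policy with stand-in constants; the real DefaultExportRowCountLimit and MaxExportRowCountLimit values are defined elsewhere in the package.

```go
package main

import (
	"errors"
	"fmt"
)

// Stand-in constants for illustration only.
const (
	defaultExportRowCountLimit = 10000
	maxExportRowCountLimit     = 50000
)

// normalizeLimit applies the same per-query rule as validateAndApplyExportLimits.
func normalizeLimit(limit int) (int, error) {
	switch {
	case limit == 0:
		return defaultExportRowCountLimit, nil
	case limit < 0:
		return 0, errors.New("limit must be positive")
	case limit > maxExportRowCountLimit:
		return 0, fmt.Errorf("limit cannot be more than %d", maxExportRowCountLimit)
	default:
		return limit, nil
	}
}

func main() {
	for _, l := range []int{0, 1000, -1, 60000} {
		v, err := normalizeLimit(l)
		fmt.Println(l, "->", v, err)
	}
}
```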
|
||||
// buildQueryRangeRequest builds a QueryRangeRequest from already-bound and validated GET query params.
|
||||
func buildQueryRangeRequest(params *types.ExportRawDataQueryParams) (qbtypes.QueryRangeRequest, error) {
|
||||
orderBy, err := parseExportQueryOrderBy(params.OrderBy)
|
||||
if err != nil {
|
||||
return qbtypes.QueryRangeRequest{}, err
|
||||
}
|
||||
|
||||
columns := parseExportQueryColumns(params.Columns)
|
||||
|
||||
var filter *qbtypes.Filter
|
||||
if params.Filter != "" {
|
||||
filter = &qbtypes.Filter{Expression: params.Filter}
|
||||
}
|
||||
|
||||
var query qbtypes.QueryEnvelope
|
||||
switch params.Source {
|
||||
case "logs":
|
||||
query = qbtypes.QueryEnvelope{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Filter: filter,
|
||||
Limit: params.Limit,
|
||||
Order: orderBy,
|
||||
SelectFields: columns,
|
||||
},
|
||||
}
|
||||
case "traces":
|
||||
query = qbtypes.QueryEnvelope{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Filter: filter,
|
||||
Limit: params.Limit,
|
||||
Order: orderBy,
|
||||
SelectFields: columns,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
return qbtypes.QueryRangeRequest{
|
||||
Start: params.Start,
|
||||
End: params.End,
|
||||
RequestType: qbtypes.RequestTypeRaw,
|
||||
CompositeQuery: qbtypes.CompositeQuery{
|
||||
Queries: []qbtypes.QueryEnvelope{query},
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
// setExportResponseHeaders sets common HTTP headers for export responses.
|
||||
func setExportResponseHeaders(rw http.ResponseWriter, format string) {
|
||||
rw.Header().Set("Cache-Control", "no-cache")
|
||||
rw.Header().Set("Vary", "Accept-Encoding")
|
||||
rw.Header().Set("Access-Control-Expose-Headers", "Content-Disposition, X-Response-Complete")
|
||||
rw.Header().Set("Trailer", "X-Response-Complete")
|
||||
rw.Header().Set("Transfer-Encoding", "chunked")
|
||||
filename := fmt.Sprintf("data_exported_%s.%s", time.Now().Format("2006-01-02_150405"), format)
|
||||
rw.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filename))
|
||||
}
|
||||
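setExportResponseHeaders declares X-Response-Complete as an HTTP trailer so the handler can report, after the body has been streamed, whether the export ran to completion. Below is a standalone net/http sketch of that trailer pattern, independent of the SigNoz types.

```go
package main

import (
	"fmt"
	"net/http"
)

func exportSketch(rw http.ResponseWriter, r *http.Request) {
	// Declare the trailer before any body bytes are written.
	rw.Header().Set("Trailer", "X-Response-Complete")
	rw.Header().Set("Content-Type", "text/csv")

	complete := true
	for i := 0; i < 3; i++ {
		if _, err := fmt.Fprintf(rw, "row-%d\n", i); err != nil {
			complete = false
			break
		}
	}

	// Setting a declared header after the body has been written emits it as a trailer.
	rw.Header().Set("X-Response-Complete", fmt.Sprintf("%t", complete))
}

func main() {
	http.HandleFunc("/export", exportSketch)
	_ = http.ListenAndServe(":8080", nil)
}
```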
|
||||
// executeExport streams data from rowChan to the response writer in the specified format.
|
||||
func (handler *handler) executeExport(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, format string, rw http.ResponseWriter) (bool, error) {
|
||||
switch format {
|
||||
case "csv", "":
|
||||
rw.Header().Set("Content-Type", "text/csv")
|
||||
csvWriter := csv.NewWriter(rw)
|
||||
isComplete, err = handler.exportLogsCSV(rowChan, errChan, csvWriter)
|
||||
isComplete, err := handler.exportRawDataCSV(rowChan, errChan, csvWriter)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
return false, err
|
||||
}
|
||||
csvWriter.Flush()
|
||||
return isComplete, nil
|
||||
case "jsonl":
|
||||
rw.Header().Set("Content-Type", "application/x-ndjson")
|
||||
isComplete, err = handler.exportLogsJSONL(rowChan, errChan, rw)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
return handler.exportRawDataJSONL(rowChan, errChan, rw)
|
||||
default:
|
||||
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid format: must be csv or jsonl"))
|
||||
return
|
||||
return false, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid format: must be csv or jsonl")
|
||||
}
|
||||
|
||||
rw.Header().Set("X-Response-Complete", strconv.FormatBool(isComplete))
|
||||
}
|
||||
|
||||
func (handler *handler) exportLogsCSV(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, csvWriter *csv.Writer) (bool, error) {
|
||||
var header []string
|
||||
// exportRawDataCSV is a generic CSV export function that works with any raw data (logs, traces, etc.)
|
||||
func (handler *handler) exportRawDataCSV(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, csvWriter *csv.Writer) (bool, error) {
|
||||
|
||||
headerToIndexMapping := make(map[string]int, len(header))
|
||||
headerToIndexMapping := make(map[string]int)
|
||||
var once sync.Once
|
||||
|
||||
totalBytes := uint64(0)
|
||||
for {
|
||||
@@ -239,18 +281,14 @@ func (handler *handler) exportLogsCSV(rowChan <-chan *qbtypes.RawRow, errChan <-
|
||||
if !ok {
|
||||
return true, nil
|
||||
}
|
||||
if header == nil {
|
||||
// Initialize and write header for CSV
|
||||
header = constructCSVHeaderFromQueryResponse(row.Data)
|
||||
|
||||
if err := csvWriter.Write(header); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
once.Do(func() {
|
||||
header := constructCSVHeaderFromQueryResponse(row.Data)
|
||||
_ = csvWriter.Write(header)
|
||||
// We ignore the error here, as it will get caught in the next iteration
|
||||
for i, col := range header {
|
||||
headerToIndexMapping[col] = i
|
||||
}
|
||||
}
|
||||
})
|
||||
record := constructCSVRecordFromQueryResponse(row.Data, headerToIndexMapping)
|
||||
if err := csvWriter.Write(record); err != nil {
|
||||
return false, err
|
||||
@@ -268,8 +306,8 @@ func (handler *handler) exportLogsCSV(rowChan <-chan *qbtypes.RawRow, errChan <-
|
||||
}
|
||||
}
|
||||
|
||||
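exportRawDataCSV derives the CSV header from the first row and guards the write with a sync.Once so it is emitted exactly once before any records. A self-contained sketch of that pattern using plain maps instead of the SigNoz row types:

```go
package main

import (
	"encoding/csv"
	"os"
	"sort"
	"sync"
)

func main() {
	rows := []map[string]string{
		{"timestamp": "t1", "body": "hello"},
		{"timestamp": "t2", "body": "world"},
	}

	w := csv.NewWriter(os.Stdout)
	var once sync.Once
	var header []string
	index := map[string]int{}

	for _, row := range rows {
		// Build and write the header from the first row only.
		once.Do(func() {
			for k := range row {
				header = append(header, k)
			}
			sort.Strings(header) // stable column order for the sketch
			for i, col := range header {
				index[col] = i
			}
			_ = w.Write(header)
		})

		record := make([]string, len(header))
		for k, v := range row {
			record[index[k]] = v
		}
		_ = w.Write(record)
	}
	w.Flush()
}
```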
func (handler *handler) exportLogsJSONL(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, writer io.Writer) (bool, error) {
|
||||
|
||||
// exportRawDataJSONL is a generic JSONL export function that works with any raw data (logs, traces, etc.)
|
||||
func (handler *handler) exportRawDataJSONL(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, writer io.Writer) (bool, error) {
|
||||
totalBytes := uint64(0)
|
||||
for {
|
||||
select {
|
||||
@@ -277,9 +315,11 @@ func (handler *handler) exportLogsJSONL(rowChan <-chan *qbtypes.RawRow, errChan
|
||||
if !ok {
|
||||
return true, nil
|
||||
}
|
||||
// Handle JSON format (JSONL - one object per line)
|
||||
jsonBytes, _ := json.Marshal(row.Data)
|
||||
totalBytes += uint64(len(jsonBytes)) + 1 // +1 for newline
|
||||
jsonBytes, err := json.Marshal(row.Data)
|
||||
if err != nil {
|
||||
return false, errors.NewUnexpectedf(errors.CodeInternal, "error marshaling JSON: %s", err)
|
||||
}
|
||||
totalBytes += uint64(len(jsonBytes)) + 1
|
||||
|
||||
if _, err := writer.Write(jsonBytes); err != nil {
|
||||
return false, errors.NewUnexpectedf(errors.CodeInternal, "error writing JSON: %s", err)
|
||||
@@ -299,69 +339,6 @@ func (handler *handler) exportLogsJSONL(rowChan <-chan *qbtypes.RawRow, errChan
|
||||
}
|
||||
}
|
||||
|
||||
func getExportQuerySource(queryParams url.Values) (string, error) {
|
||||
switch queryParams.Get("source") {
|
||||
case "logs", "":
|
||||
return "logs", nil
|
||||
case "metrics":
|
||||
return "metrics", errors.NewInvalidInputf(errors.CodeInvalidInput, "metrics export not yet supported")
|
||||
case "traces":
|
||||
return "traces", errors.NewInvalidInputf(errors.CodeInvalidInput, "traces export not yet supported")
|
||||
default:
|
||||
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid source: must be logs, metrics or traces")
|
||||
}
|
||||
}
|
||||
|
||||
func getExportQueryFormat(queryParams url.Values) (string, error) {
|
||||
switch queryParams.Get("format") {
|
||||
case "csv", "":
|
||||
return "csv", nil
|
||||
case "jsonl":
|
||||
return "jsonl", nil
|
||||
default:
|
||||
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid format: must be csv or jsonl")
|
||||
}
|
||||
}
|
||||
|
||||
func getExportQueryLimit(queryParams url.Values) (int, error) {
|
||||
|
||||
limitStr := queryParams.Get("limit")
|
||||
if limitStr == "" {
|
||||
return DefaultExportRowCountLimit, nil
|
||||
} else {
|
||||
limit, err := strconv.Atoi(limitStr)
|
||||
if err != nil {
|
||||
return 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid limit format: %s", err.Error())
|
||||
}
|
||||
if limit <= 0 {
|
||||
return 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "limit must be positive")
|
||||
}
|
||||
if limit > MaxExportRowCountLimit {
|
||||
return 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "limit cannot be more than %d", MaxExportRowCountLimit)
|
||||
}
|
||||
return limit, nil
|
||||
}
|
||||
}
|
||||
|
||||
func getExportQueryTimeRange(queryParams url.Values) (uint64, uint64, error) {
|
||||
|
||||
startTimeStr := queryParams.Get("start")
|
||||
endTimeStr := queryParams.Get("end")
|
||||
|
||||
if startTimeStr == "" || endTimeStr == "" {
|
||||
return 0, 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "start and end time are required")
|
||||
}
|
||||
startTime, err := strconv.ParseUint(startTimeStr, 10, 64)
|
||||
if err != nil {
|
||||
return 0, 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid start time format: %s", err.Error())
|
||||
}
|
||||
endTime, err := strconv.ParseUint(endTimeStr, 10, 64)
|
||||
if err != nil {
|
||||
return 0, 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid end time format: %s", err.Error())
|
||||
}
|
||||
return startTime, endTime, nil
|
||||
}
|
||||
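The start and end parameters are Unix timestamps in nanoseconds. A small sketch of parsing them the same way (strconv.ParseUint) and converting them to time.Time values, which can help when inspecting exports by hand:

```go
package main

import (
	"fmt"
	"strconv"
	"time"
)

func main() {
	startStr, endStr := "1693612800000000000", "1693699199000000000"

	start, err := strconv.ParseUint(startStr, 10, 64)
	if err != nil {
		panic(err)
	}
	end, err := strconv.ParseUint(endStr, 10, 64)
	if err != nil {
		panic(err)
	}

	// time.Unix takes seconds and nanoseconds; pass everything as nanoseconds.
	fmt.Println(time.Unix(0, int64(start)).UTC(), "->", time.Unix(0, int64(end)).UTC())
}
```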
|
||||
func constructCSVHeaderFromQueryResponse(data map[string]any) []string {
|
||||
header := make([]string, 0, len(data))
|
||||
for key := range data {
|
||||
@@ -427,9 +404,12 @@ func constructCSVRecordFromQueryResponse(data map[string]any, headerToIndexMappi
|
||||
valueStr = v.String()
|
||||
|
||||
default:
|
||||
// For all other complex types (maps, structs, etc.)
|
||||
jsonBytes, _ := json.Marshal(v)
|
||||
valueStr = string(jsonBytes)
|
||||
jsonBytes, err := json.Marshal(v)
|
||||
if err != nil {
|
||||
valueStr = fmt.Sprintf("%v", v)
|
||||
} else {
|
||||
valueStr = string(jsonBytes)
|
||||
}
|
||||
}
|
||||
|
||||
record[index] = sanitizeForCSV(valueStr)
|
||||
@@ -438,23 +418,17 @@ func constructCSVRecordFromQueryResponse(data map[string]any, headerToIndexMappi
|
||||
return record
|
||||
}
|
||||
|
||||
// getExportQueryColumns parses the "columns" query parameters and returns a slice of TelemetryFieldKey structs.
|
||||
// Each column should be a valid telemetry field key in the format "context.field:type" or "context.field" or "field"
|
||||
func getExportQueryColumns(queryParams url.Values) []telemetrytypes.TelemetryFieldKey {
|
||||
columnParams := queryParams["columns"]
|
||||
|
||||
// parseExportQueryColumns converts bound column strings to TelemetryFieldKey structs.
|
||||
// Each column should be in the format "context.field:type" or "context.field" or "field"
|
||||
func parseExportQueryColumns(columnParams []string) []telemetrytypes.TelemetryFieldKey {
|
||||
columns := make([]telemetrytypes.TelemetryFieldKey, 0, len(columnParams))
|
||||
|
||||
for _, columnStr := range columnParams {
|
||||
// Skip empty strings
|
||||
columnStr = strings.TrimSpace(columnStr)
|
||||
if columnStr == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
columns = append(columns, telemetrytypes.GetFieldKeyFromKeyText(columnStr))
|
||||
}
|
||||
|
||||
return columns
|
||||
}
|
||||
|
||||
@@ -466,51 +440,24 @@ func getsizeOfStringSlice(slice []string) uint64 {
|
||||
return totalBytes
|
||||
}
|
||||
|
||||
// getExportQueryOrderBy parses the "order_by" query parameters and returns a slice of OrderBy structs.
|
||||
// Each "order_by" parameter should be in the format "column:direction"
|
||||
// Each "column" should be a valid telemetry field key in the format "context.field:type" or "context.field" or "field"
|
||||
func getExportQueryOrderBy(queryParams url.Values) ([]qbtypes.OrderBy, error) {
|
||||
orderByParam := queryParams.Get("order_by")
|
||||
|
||||
// parseExportQueryOrderBy converts a bound order_by string to an OrderBy slice.
|
||||
// The string should be in the format "column:direction" and is assumed already validated.
|
||||
func parseExportQueryOrderBy(orderByParam string) ([]qbtypes.OrderBy, error) {
|
||||
orderByParam = strings.TrimSpace(orderByParam)
|
||||
if orderByParam == "" {
|
||||
return telemetrylogs.DefaultLogsV2SortingOrder, nil
|
||||
return []qbtypes.OrderBy{}, nil
|
||||
}
|
||||
|
||||
parts := strings.Split(orderByParam, ":")
|
||||
if len(parts) != 2 && len(parts) != 3 {
|
||||
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid order_by format: %s, should be <column>:<direction>", orderByParam)
|
||||
}
|
||||
|
||||
column := strings.Join(parts[:len(parts)-1], ":")
|
||||
direction := parts[len(parts)-1]
|
||||
|
||||
orderDirection, ok := qbtypes.OrderDirectionMap[direction]
|
||||
if !ok {
|
||||
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid order_by direction: %s, should be one of %s, %s", direction, qbtypes.OrderDirectionAsc, qbtypes.OrderDirectionDesc)
|
||||
}
|
||||
|
||||
orderByKey := telemetrytypes.GetFieldKeyFromKeyText(column)
|
||||
|
||||
orderBy := []qbtypes.OrderBy{
|
||||
return []qbtypes.OrderBy{
|
||||
{
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: orderByKey,
|
||||
TelemetryFieldKey: telemetrytypes.GetFieldKeyFromKeyText(column),
|
||||
},
|
||||
Direction: orderDirection,
|
||||
Direction: qbtypes.OrderDirectionMap[direction],
|
||||
},
|
||||
}
|
||||
|
||||
// If we are ordering by the timestamp column, also order by the ID column
|
||||
if orderByKey.Name == telemetrylogs.LogsV2TimestampColumn {
|
||||
orderBy = append(orderBy, qbtypes.OrderBy{
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2IDColumn,
|
||||
},
|
||||
},
|
||||
Direction: orderDirection,
|
||||
})
|
||||
}
|
||||
return orderBy, nil
|
||||
}, nil
|
||||
}
|
||||
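parseExportQueryOrderBy treats the final colon-separated segment as the direction and rejoins everything before it as the column key, which is why a typed key such as attribute.user:string:desc still parses. A standalone sketch of that split, independent of the SigNoz types:

```go
package main

import (
	"fmt"
	"strings"
)

// splitOrderBy separates "column[:type]:direction" into column and direction.
func splitOrderBy(orderBy string) (column, direction string, err error) {
	parts := strings.Split(orderBy, ":")
	if len(parts) != 2 && len(parts) != 3 {
		return "", "", fmt.Errorf("invalid order_by format: %s, should be <column>:<direction>", orderBy)
	}
	column = strings.Join(parts[:len(parts)-1], ":")
	direction = parts[len(parts)-1]
	if direction != "asc" && direction != "desc" {
		return "", "", fmt.Errorf("invalid order_by direction: %s", direction)
	}
	return column, direction, nil
}

func main() {
	fmt.Println(splitOrderBy("timestamp:desc"))
	fmt.Println(splitOrderBy("attribute.user:string:asc"))
}
```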
|
||||
@@ -2,261 +2,162 @@ package implrawdataexport
|
||||
|
||||
import (
|
||||
"net/url"
|
||||
"strconv"
|
||||
"testing"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/telemetrylogs"
|
||||
"github.com/SigNoz/signoz/pkg/http/binding"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestGetExportQuerySource(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
queryParams url.Values
|
||||
expectedSource string
|
||||
expectedError bool
|
||||
}{
|
||||
{
|
||||
name: "default logs source",
|
||||
queryParams: url.Values{},
|
||||
expectedSource: "logs",
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "explicit logs source",
|
||||
queryParams: url.Values{"source": {"logs"}},
|
||||
expectedSource: "logs",
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "metrics source - not supported",
|
||||
queryParams: url.Values{"source": {"metrics"}},
|
||||
expectedSource: "metrics",
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "traces source - not supported",
|
||||
queryParams: url.Values{"source": {"traces"}},
|
||||
expectedSource: "traces",
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "invalid source",
|
||||
queryParams: url.Values{"source": {"invalid"}},
|
||||
expectedSource: "",
|
||||
expectedError: true,
|
||||
},
|
||||
}
|
||||
func TestExportRawDataQueryParams_BindingDefaults(t *testing.T) {
|
||||
var params types.ExportRawDataQueryParams
|
||||
err := binding.Query.BindQuery(url.Values{}, ¶ms)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, "logs", params.Source)
|
||||
assert.Equal(t, "csv", params.Format)
|
||||
assert.Equal(t, DefaultExportRowCountLimit, params.Limit)
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
source, err := getExportQuerySource(tt.queryParams)
|
||||
assert.Equal(t, tt.expectedSource, source)
|
||||
if tt.expectedError {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
})
|
||||
|
||||
func logQuery(limit int) qbtypes.QueryEnvelope {
|
||||
return qbtypes.QueryEnvelope{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{Limit: limit},
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetExportQueryFormat(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
queryParams url.Values
|
||||
expectedFormat string
|
||||
expectedError bool
|
||||
}{
|
||||
{
|
||||
name: "default csv format",
|
||||
queryParams: url.Values{},
|
||||
expectedFormat: "csv",
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "explicit csv format",
|
||||
queryParams: url.Values{"format": {"csv"}},
|
||||
expectedFormat: "csv",
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "jsonl format",
|
||||
queryParams: url.Values{"format": {"jsonl"}},
|
||||
expectedFormat: "jsonl",
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "invalid format",
|
||||
queryParams: url.Values{"format": {"xml"}},
|
||||
expectedFormat: "",
|
||||
expectedError: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
format, err := getExportQueryFormat(tt.queryParams)
|
||||
assert.Equal(t, tt.expectedFormat, format)
|
||||
if tt.expectedError {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
})
|
||||
func traceQuery(limit int) qbtypes.QueryEnvelope {
|
||||
return qbtypes.QueryEnvelope{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{Limit: limit},
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetExportQueryLimit(t *testing.T) {
|
||||
func traceOperatorQuery(limit int) qbtypes.QueryEnvelope {
|
||||
return qbtypes.QueryEnvelope{
|
||||
Type: qbtypes.QueryTypeTraceOperator,
|
||||
Spec: qbtypes.QueryBuilderTraceOperator{Limit: limit},
|
||||
}
|
||||
}
|
||||
|
||||
func makeRequest(queries ...qbtypes.QueryEnvelope) qbtypes.QueryRangeRequest {
|
||||
return qbtypes.QueryRangeRequest{
|
||||
CompositeQuery: qbtypes.CompositeQuery{Queries: queries},
|
||||
}
|
||||
}
|
||||
|
||||
func TestValidateAndApplyExportLimits(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
queryParams url.Values
|
||||
expectedLimit int
|
||||
req qbtypes.QueryRangeRequest
|
||||
expectedError bool
|
||||
// assertions on each query after the call (indexed)
|
||||
checkQueries func(t *testing.T, queries []qbtypes.QueryEnvelope)
|
||||
}{
|
||||
{
|
||||
name: "default limit",
|
||||
queryParams: url.Values{},
|
||||
expectedLimit: DefaultExportRowCountLimit,
|
||||
expectedError: false,
|
||||
name: "single log query, zero limit gets default",
|
||||
req: makeRequest(logQuery(0)),
|
||||
checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
|
||||
assert.Equal(t, DefaultExportRowCountLimit, q[0].GetLimit())
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "valid limit",
|
||||
queryParams: url.Values{"limit": {"5000"}},
|
||||
expectedLimit: 5000,
|
||||
expectedError: false,
|
||||
name: "single log query, valid limit kept",
|
||||
req: makeRequest(logQuery(1000)),
|
||||
checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
|
||||
assert.Equal(t, 1000, q[0].GetLimit())
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "maximum limit",
|
||||
queryParams: url.Values{"limit": {strconv.Itoa(MaxExportRowCountLimit)}},
|
||||
expectedLimit: MaxExportRowCountLimit,
|
||||
expectedError: false,
|
||||
name: "single log query, max limit kept",
|
||||
req: makeRequest(logQuery(MaxExportRowCountLimit)),
|
||||
checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
|
||||
assert.Equal(t, MaxExportRowCountLimit, q[0].GetLimit())
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "limit exceeds maximum",
|
||||
queryParams: url.Values{"limit": {"100000"}},
|
||||
expectedLimit: 0,
|
||||
name: "single log query, limit exceeds max",
|
||||
req: makeRequest(logQuery(MaxExportRowCountLimit + 1)),
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "invalid limit format",
|
||||
queryParams: url.Values{"limit": {"invalid"}},
|
||||
expectedLimit: 0,
|
||||
name: "single log query, negative limit",
|
||||
req: makeRequest(logQuery(-1)),
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "negative limit",
|
||||
queryParams: url.Values{"limit": {"-100"}},
|
||||
expectedLimit: 0,
|
||||
name: "single trace query, zero limit gets default",
|
||||
req: makeRequest(traceQuery(0)),
|
||||
checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
|
||||
assert.Equal(t, DefaultExportRowCountLimit, q[0].GetLimit())
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "multiple queries without trace operator",
|
||||
req: makeRequest(logQuery(0), traceQuery(0)),
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "trace operator with other queries — non-operator disabled",
|
||||
req: makeRequest(logQuery(500), traceOperatorQuery(1000)),
|
||||
checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
|
||||
assert.True(t, q[0].GetDisabled(), "log query should be disabled")
|
||||
assert.False(t, q[1].GetDisabled(), "trace operator should stay enabled")
|
||||
assert.Equal(t, 1000, q[1].GetLimit())
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "trace operator alone, zero limit gets default",
|
||||
req: makeRequest(traceOperatorQuery(0)),
|
||||
checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
|
||||
assert.Equal(t, DefaultExportRowCountLimit, q[0].GetLimit())
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "unsupported query type",
|
||||
req: makeRequest(qbtypes.QueryEnvelope{Type: qbtypes.QueryTypeBuilder, Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{}}),
|
||||
expectedError: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
limit, err := getExportQueryLimit(tt.queryParams)
|
||||
assert.Equal(t, tt.expectedLimit, limit)
|
||||
err := validateAndApplyExportLimits(&tt.req)
|
||||
if tt.expectedError {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
if tt.checkQueries != nil {
|
||||
tt.checkQueries(t, tt.req.CompositeQuery.Queries)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetExportQueryTimeRange(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
queryParams url.Values
|
||||
expectedStartTime uint64
|
||||
expectedEndTime uint64
|
||||
expectedError bool
|
||||
}{
|
||||
{
|
||||
name: "valid time range",
|
||||
queryParams: url.Values{
|
||||
"start": {"1640995200"},
|
||||
"end": {"1641081600"},
|
||||
},
|
||||
expectedStartTime: 1640995200,
|
||||
expectedEndTime: 1641081600,
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "missing start time",
|
||||
queryParams: url.Values{"end": {"1641081600"}},
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "missing end time",
|
||||
queryParams: url.Values{"start": {"1640995200"}},
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "missing both times",
|
||||
queryParams: url.Values{},
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "invalid start time format",
|
||||
queryParams: url.Values{
|
||||
"start": {"invalid"},
|
||||
"end": {"1641081600"},
|
||||
},
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "invalid end time format",
|
||||
queryParams: url.Values{
|
||||
"start": {"1640995200"},
|
||||
"end": {"invalid"},
|
||||
},
|
||||
expectedError: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
startTime, endTime, err := getExportQueryTimeRange(tt.queryParams)
|
||||
if tt.expectedError {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, tt.expectedStartTime, startTime)
|
||||
assert.Equal(t, tt.expectedEndTime, endTime)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetExportQueryColumns(t *testing.T) {
|
||||
func TestParseExportQueryColumns(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
queryParams url.Values
|
||||
input []string
|
||||
expectedColumns []telemetrytypes.TelemetryFieldKey
|
||||
}{
|
||||
{
|
||||
name: "no columns specified",
|
||||
queryParams: url.Values{},
|
||||
name: "empty input",
|
||||
input: []string{},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{},
|
||||
},
|
||||
{
|
||||
name: "single column",
|
||||
queryParams: url.Values{
|
||||
"columns": {"timestamp"},
|
||||
},
|
||||
name: "single column",
|
||||
input: []string{"timestamp"},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "timestamp"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "multiple columns",
|
||||
queryParams: url.Values{
|
||||
"columns": {"timestamp", "message", "level"},
|
||||
},
|
||||
name: "multiple columns",
|
||||
input: []string{"timestamp", "message", "level"},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "timestamp"},
|
||||
{Name: "message"},
|
||||
@@ -264,211 +165,90 @@ func TestGetExportQueryColumns(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "empty column name (should be skipped)",
|
||||
queryParams: url.Values{
|
||||
"columns": {"timestamp", "", "level"},
|
||||
},
|
||||
name: "empty entry is skipped",
|
||||
input: []string{"timestamp", "", "level"},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "timestamp"},
|
||||
{Name: "level"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "whitespace column name (should be skipped)",
|
||||
queryParams: url.Values{
|
||||
"columns": {"timestamp", " ", "level"},
|
||||
},
|
||||
name: "whitespace-only entry is skipped",
|
||||
input: []string{"timestamp", " ", "level"},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "timestamp"},
|
||||
{Name: "level"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "valid column name with data type",
|
||||
queryParams: url.Values{
|
||||
"columns": {"timestamp", "attribute.user:string", "level"},
|
||||
},
|
||||
name: "column with context and type",
|
||||
input: []string{"attribute.user:string"},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "timestamp"},
|
||||
{Name: "user", FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeString},
|
||||
{Name: "level"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "valid column name with dot notation",
|
||||
queryParams: url.Values{
|
||||
"columns": {"timestamp", "attribute.user.string", "level"},
|
||||
},
|
||||
name: "column with context, dot-notation name",
|
||||
input: []string{"attribute.user.string"},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "timestamp"},
|
||||
{Name: "user.string", FieldContext: telemetrytypes.FieldContextAttribute},
|
||||
{Name: "level"},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
columns := getExportQueryColumns(tt.queryParams)
|
||||
columns := parseExportQueryColumns(tt.input)
|
||||
assert.Equal(t, len(tt.expectedColumns), len(columns))
|
||||
for i, expectedCol := range tt.expectedColumns {
|
||||
assert.Equal(t, expectedCol, columns[i])
|
||||
for i, expected := range tt.expectedColumns {
|
||||
assert.Equal(t, expected, columns[i])
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetExportQueryOrderBy(t *testing.T) {
|
||||
func TestParseExportQueryOrderBy(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
queryParams url.Values
|
||||
input string
|
||||
expectedOrder []qbtypes.OrderBy
|
||||
expectedError bool
|
||||
}{
|
||||
{
|
||||
name: "no order specified",
|
||||
queryParams: url.Values{},
|
||||
name: "empty string returns empty slice",
|
||||
input: "",
|
||||
expectedOrder: []qbtypes.OrderBy{},
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "simple column asc",
|
||||
input: "timestamp:asc",
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "timestamp"},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "simple column desc",
|
||||
input: "timestamp:desc",
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionDesc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2TimestampColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionDesc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2IDColumn,
|
||||
},
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "timestamp"},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "single order error, direction not specified",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"timestamp"},
|
||||
},
|
||||
expectedOrder: nil,
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "single order no error",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"timestamp:asc"},
|
||||
},
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2TimestampColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2IDColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "multiple orders",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"timestamp:asc", "body:desc", "id:asc"},
|
||||
},
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2TimestampColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2IDColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "empty order name (should be skipped)",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"timestamp:asc", "", "id:asc"},
|
||||
},
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2TimestampColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2IDColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "whitespace order name (should be skipped)",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"timestamp:asc", " ", "id:asc"},
|
||||
},
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2TimestampColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2IDColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "invalid order name (should error out)",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"attributes.user:", "id:asc"},
|
||||
},
|
||||
expectedOrder: nil,
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "valid order name (should be included)",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"attribute.user:string:desc", "id:asc"},
|
||||
},
|
||||
name: "column with context and type qualifier",
|
||||
input: "attribute.user:string:desc",
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionDesc,
|
||||
@@ -484,10 +264,8 @@ func TestGetExportQueryOrderBy(t *testing.T) {
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "valid order name (should be included)",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"attribute.user.string:desc", "id:asc"},
|
||||
},
|
||||
name: "column with context, dot-notation name",
|
||||
input: "attribute.user.string:desc",
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionDesc,
|
||||
@@ -501,18 +279,35 @@ func TestGetExportQueryOrderBy(t *testing.T) {
|
||||
},
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "resource with context and type",
|
||||
input: "resource.service.name:string:asc",
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "service.name",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
order, err := getExportQueryOrderBy(tt.queryParams)
|
||||
order, err := parseExportQueryOrderBy(tt.input)
|
||||
if tt.expectedError {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, len(tt.expectedOrder), len(order))
|
||||
for i, expectedOrd := range tt.expectedOrder {
|
||||
assert.Equal(t, expectedOrd, order[i])
|
||||
for i, expected := range tt.expectedOrder {
|
||||
assert.Equal(t, expected, order[i])
|
||||
}
|
||||
}
|
||||
})
|
||||
@@ -529,10 +324,8 @@ func TestConstructCSVHeaderFromQueryResponse(t *testing.T) {

header := constructCSVHeaderFromQueryResponse(data)

// Since map iteration order is not guaranteed, check that all expected keys are present
expectedKeys := []string{"timestamp", "message", "level", "id"}
assert.Equal(t, len(expectedKeys), len(header))

for _, key := range expectedKeys {
assert.Contains(t, header, key)
}

@@ -23,8 +23,26 @@ func NewModule(querier querier.Querier) rawdataexport.Module {
|
||||
|
||||
func (m *Module) ExportRawData(ctx context.Context, orgID valuer.UUID, rangeRequest *qbtypes.QueryRangeRequest, doneChan chan any) (chan *qbtypes.RawRow, chan error) {
|
||||
|
||||
spec := rangeRequest.CompositeQuery.Queries[0].Spec.(qbtypes.QueryBuilderQuery[qbtypes.LogAggregation])
|
||||
rowCountLimit := spec.Limit
|
||||
isTraceOperatorQueryPresent := false
|
||||
traceOperatorQueryIndex := -1
|
||||
|
||||
queries := rangeRequest.CompositeQuery.Queries
|
||||
for idx := range len(queries) {
|
||||
if _, ok := queries[idx].Spec.(qbtypes.QueryBuilderTraceOperator); ok {
|
||||
isTraceOperatorQueryPresent = true
|
||||
traceOperatorQueryIndex = idx
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// If a trace operator query is present, mark all queries other than the trace operator as disabled
|
||||
if isTraceOperatorQueryPresent {
|
||||
for idx := range len(queries) {
|
||||
if idx != traceOperatorQueryIndex {
|
||||
queries[idx].SetDisabled(true)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
rowChan := make(chan *qbtypes.RawRow, 1)
|
||||
errChan := make(chan error, 1)
|
||||
@@ -38,52 +56,62 @@ func (m *Module) ExportRawData(ctx context.Context, orgID valuer.UUID, rangeRequ
|
||||
defer close(errChan)
|
||||
defer close(rowChan)
|
||||
|
||||
rowCount := 0
|
||||
|
||||
for rowCount < rowCountLimit {
|
||||
spec.Limit = min(ChunkSize, rowCountLimit-rowCount)
|
||||
spec.Offset = rowCount
|
||||
|
||||
rangeRequest.CompositeQuery.Queries[0].Spec = spec
|
||||
|
||||
response, err := m.querier.QueryRange(contextWithTimeout, orgID, rangeRequest)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
return
|
||||
}
|
||||
|
||||
newRowsCount := 0
|
||||
for _, result := range response.Data.Results {
|
||||
resultData, ok := result.(*qbtypes.RawData)
|
||||
if !ok {
|
||||
errChan <- errors.NewInternalf(errors.CodeInternal, "expected RawData, got %T", result)
|
||||
return
|
||||
}
|
||||
|
||||
newRowsCount += len(resultData.Rows)
|
||||
for _, row := range resultData.Rows {
|
||||
select {
|
||||
case rowChan <- row:
|
||||
case <-doneChan:
|
||||
return
|
||||
case <-ctx.Done():
|
||||
errChan <- ctx.Err()
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// Break if we did not receive any new rows
|
||||
if newRowsCount == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
rowCount += newRowsCount
|
||||
|
||||
if isTraceOperatorQueryPresent {
|
||||
// If the trace operator query is present, we need to export the data for the trace operator query only
|
||||
exportRawDataForSingleQuery(m.querier, contextWithTimeout, orgID, rangeRequest, rowChan, errChan, doneChan, traceOperatorQueryIndex)
|
||||
} else {
|
||||
// If the trace operator query is not present, we need to export the data for the first query only
|
||||
exportRawDataForSingleQuery(m.querier, contextWithTimeout, orgID, rangeRequest, rowChan, errChan, doneChan, 0)
|
||||
}
|
||||
}()
|
||||
|
||||
return rowChan, errChan
|
||||
|
||||
}
|
||||
|
||||
func exportRawDataForSingleQuery(querier querier.Querier, ctx context.Context, orgID valuer.UUID, rangeRequest *qbtypes.QueryRangeRequest, rowChan chan *qbtypes.RawRow, errChan chan error, doneChan chan any, queryIndex int) {
|
||||
|
||||
query := rangeRequest.CompositeQuery.Queries[queryIndex]
|
||||
rowCountLimit := query.GetLimit()
|
||||
rowCount := 0
|
||||
|
||||
for rowCount < rowCountLimit {
|
||||
chunkSize := min(ChunkSize, rowCountLimit-rowCount)
|
||||
query.SetLimit(chunkSize)
|
||||
query.SetOffset(rowCount)
|
||||
|
||||
response, err := querier.QueryRange(ctx, orgID, rangeRequest)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
return
|
||||
}
|
||||
|
||||
newRowsCount := 0
|
||||
for _, result := range response.Data.Results {
|
||||
resultData, ok := result.(*qbtypes.RawData)
|
||||
if !ok {
|
||||
errChan <- errors.NewInternalf(errors.CodeInternal, "expected RawData, got %T", result)
|
||||
return
|
||||
}
|
||||
|
||||
newRowsCount += len(resultData.Rows)
|
||||
for _, row := range resultData.Rows {
|
||||
select {
|
||||
case rowChan <- row:
|
||||
case <-doneChan:
|
||||
return
|
||||
case <-ctx.Done():
|
||||
errChan <- ctx.Err()
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
rowCount += newRowsCount
|
||||
|
||||
// Stop if we received fewer rows than requested — no more data available
|
||||
if newRowsCount < chunkSize {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
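
For orientation, a rough consumer-side sketch (not part of this diff) of how the channels returned by ExportRawData might be drained; drainExport and writeRow are illustrative names, everything else follows the signatures above:

func drainExport(ctx context.Context, module rawdataexport.Module, orgID valuer.UUID, rangeReq *qbtypes.QueryRangeRequest, writeRow func(*qbtypes.RawRow)) error {
	done := make(chan any)
	defer close(done) // tells the producer to stop early if we bail out before draining

	rowChan, errChan := module.ExportRawData(ctx, orgID, rangeReq, done)
	for row := range rowChan { // closed by the producer once all chunks are exported
		writeRow(row)
	}
	// errChan is buffered with capacity 1 and is closed together with rowChan,
	// so this read returns either the producer's error or nil.
	if err := <-errChan; err != nil {
		return err
	}
	return nil
}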
|
||||
|
||||
@@ -1,454 +0,0 @@
|
||||
package querier
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/http/handler"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
|
||||
)
|
||||
|
||||
// QueryRangeV5OpenAPIDef is the OpenAPI definition for the /api/v5/query_range endpoint.
|
||||
var QueryRangeV5OpenAPIDef = handler.OpenAPIDef{
|
||||
ID: "QueryRangeV5",
|
||||
Tags: []string{"query"},
|
||||
Summary: "Query range",
|
||||
Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.",
|
||||
Request: new(qbtypes.QueryRangeRequest),
|
||||
RequestContentType: "application/json",
|
||||
RequestExamples: queryRangeV5Examples,
|
||||
Response: new(qbtypes.QueryRangeResponse),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{http.StatusBadRequest},
|
||||
SecuritySchemes: []handler.OpenAPISecurityScheme{
|
||||
{Name: ctxtypes.AuthTypeAPIKey.StringValue(), Scopes: []string{"VIEWER"}},
|
||||
{Name: ctxtypes.AuthTypeTokenizer.StringValue(), Scopes: []string{"VIEWER"}},
|
||||
},
|
||||
}
|
||||
|
||||
var queryRangeV5Examples = []handler.OpenAPIExample{
|
||||
{
|
||||
Name: "traces_time_series",
|
||||
Summary: "Time series: count spans grouped by service",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "time_series",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "traces",
|
||||
"aggregations": []any{
|
||||
map[string]any{"expression": "count()", "alias": "span_count"},
|
||||
},
|
||||
"stepInterval": "60s",
|
||||
"filter": map[string]any{"expression": "service.name = 'frontend'"},
|
||||
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
|
||||
"order": []any{map[string]any{"key": map[string]any{"name": "span_count"}, "direction": "desc"}},
|
||||
"limit": 10,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "logs_time_series",
|
||||
Summary: "Time series: count error logs grouped by service",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "time_series",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "logs",
|
||||
"aggregations": []any{
|
||||
map[string]any{"expression": "count()", "alias": "log_count"},
|
||||
},
|
||||
"stepInterval": "60s",
|
||||
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
|
||||
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
|
||||
"order": []any{map[string]any{"key": map[string]any{"name": "log_count"}, "direction": "desc"}},
|
||||
"limit": 10,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "metrics_gauge_time_series",
|
||||
Summary: "Time series: latest gauge value averaged across series",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "time_series",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "metrics",
|
||||
"aggregations": []any{
|
||||
map[string]any{"metricName": "system.cpu.utilization", "timeAggregation": "latest", "spaceAggregation": "avg"},
|
||||
},
|
||||
"stepInterval": "60s",
|
||||
"groupBy": []any{map[string]any{"name": "host.name", "fieldContext": "resource"}},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "metrics_rate_time_series",
|
||||
Summary: "Time series: rate of cumulative counter",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "time_series",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "metrics",
|
||||
"aggregations": []any{
|
||||
map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum"},
|
||||
},
|
||||
"stepInterval": 120,
|
||||
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "metrics_histogram_time_series",
|
||||
Summary: "Time series: p99 latency from histogram",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "time_series",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "metrics",
|
||||
"aggregations": []any{
|
||||
map[string]any{"metricName": "http.server.duration.bucket", "spaceAggregation": "p99"},
|
||||
},
|
||||
"stepInterval": "60s",
|
||||
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "logs_raw",
|
||||
Summary: "Raw: fetch raw log records",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "raw",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "logs",
|
||||
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
|
||||
"selectFields": []any{
|
||||
map[string]any{"name": "body", "fieldContext": "log"},
|
||||
map[string]any{"name": "service.name", "fieldContext": "resource"},
|
||||
},
|
||||
"order": []any{
|
||||
map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "log"}, "direction": "desc"},
|
||||
map[string]any{"key": map[string]any{"name": "id"}, "direction": "desc"},
|
||||
},
|
||||
"limit": 50,
|
||||
"offset": 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "traces_raw",
|
||||
Summary: "Raw: fetch raw span records",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "raw",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "traces",
|
||||
"filter": map[string]any{"expression": "service.name = 'frontend' AND has_error = true"},
|
||||
"selectFields": []any{
|
||||
map[string]any{"name": "name", "fieldContext": "span"},
|
||||
map[string]any{"name": "duration_nano", "fieldContext": "span"},
|
||||
},
|
||||
"order": []any{
|
||||
map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "span"}, "direction": "desc"},
|
||||
},
|
||||
"limit": 100,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "traces_scalar",
|
||||
Summary: "Scalar: total span count",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "scalar",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "traces",
|
||||
"aggregations": []any{
|
||||
map[string]any{"expression": "count()", "alias": "span_count"},
|
||||
},
|
||||
"filter": map[string]any{"expression": "service.name = 'frontend'"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "logs_scalar",
|
||||
Summary: "Scalar: total error log count",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "scalar",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "logs",
|
||||
"aggregations": []any{
|
||||
map[string]any{"expression": "count()", "alias": "error_count"},
|
||||
},
|
||||
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "metrics_scalar",
|
||||
Summary: "Scalar: single reduced metric value",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "scalar",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "metrics",
|
||||
"aggregations": []any{
|
||||
map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum", "reduceTo": "sum"},
|
||||
},
|
||||
"stepInterval": "60s",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "formula",
|
||||
Summary: "Formula: error rate from two trace queries",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "time_series",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "traces",
|
||||
"aggregations": []any{map[string]any{"expression": "countIf(has_error = true)"}},
|
||||
"stepInterval": "60s",
|
||||
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
|
||||
},
|
||||
},
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "B",
|
||||
"signal": "traces",
|
||||
"aggregations": []any{map[string]any{"expression": "count()"}},
|
||||
"stepInterval": "60s",
|
||||
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
|
||||
},
|
||||
},
|
||||
map[string]any{
|
||||
"type": "builder_formula",
|
||||
"spec": map[string]any{
|
||||
"name": "error_rate",
|
||||
"expression": "A / B * 100",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "promql",
|
||||
Summary: "PromQL: request rate with UTF-8 metric name",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "time_series",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "promql",
|
||||
"spec": map[string]any{
|
||||
"name": "request_rate",
|
||||
"query": "sum(rate({\"http.server.duration.count\"}[5m])) by (\"service.name\")",
|
||||
"step": 60,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "clickhouse_sql_traces_time_series",
|
||||
Summary: "ClickHouse SQL: traces time series with resource filter",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "time_series",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "clickhouse_sql",
|
||||
"spec": map[string]any{
|
||||
"name": "span_rate",
|
||||
"query": "WITH __resource_filter AS (" +
|
||||
" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
|
||||
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
|
||||
" ) SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, count() AS value" +
|
||||
" FROM signoz_traces.distributed_signoz_index_v3" +
|
||||
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
|
||||
" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
|
||||
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
|
||||
" GROUP BY ts ORDER BY ts",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "clickhouse_sql_logs_raw",
|
||||
Summary: "ClickHouse SQL: raw logs with resource filter",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "raw",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "clickhouse_sql",
|
||||
"spec": map[string]any{
|
||||
"name": "recent_errors",
|
||||
"query": "WITH __resource_filter AS (" +
|
||||
" SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource" +
|
||||
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
|
||||
" ) SELECT timestamp, body" +
|
||||
" FROM signoz_logs.distributed_logs_v2" +
|
||||
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
|
||||
" AND timestamp >= $start_timestamp_nano AND timestamp <= $end_timestamp_nano" +
|
||||
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
|
||||
" AND severity_text = 'ERROR'" +
|
||||
" ORDER BY timestamp DESC LIMIT 100",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "clickhouse_sql_traces_scalar",
|
||||
Summary: "ClickHouse SQL: scalar aggregate with resource filter",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "scalar",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "clickhouse_sql",
|
||||
"spec": map[string]any{
|
||||
"name": "total_spans",
|
||||
"query": "WITH __resource_filter AS (" +
|
||||
" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
|
||||
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
|
||||
" ) SELECT count() AS value" +
|
||||
" FROM signoz_traces.distributed_signoz_index_v3" +
|
||||
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
|
||||
" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
|
||||
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
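
For reference, a rough sketch (not part of this diff) of sending the first example payload above to the endpoint; the base URL and the SIGNOZ-API-KEY header are assumptions for illustration, and imports of bytes, encoding/json, log, net/http and os are implied:

payload, err := json.Marshal(queryRangeV5Examples[0].Value)
if err != nil {
	log.Fatal(err)
}
req, err := http.NewRequest(http.MethodPost, "http://localhost:8080/api/v5/query_range", bytes.NewReader(payload))
if err != nil {
	log.Fatal(err)
}
req.Header.Set("Content-Type", "application/json")
req.Header.Set("SIGNOZ-API-KEY", os.Getenv("SIGNOZ_API_KEY")) // assumed auth header, set for illustration only
resp, err := http.DefaultClient.Do(req)
if err != nil {
	log.Fatal(err)
}
defer resp.Body.Close()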
|
||||
@@ -586,7 +586,7 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
})).Methods(http.MethodGet)
|
||||
|
||||
// Export
|
||||
router.HandleFunc("/api/v1/export_raw_data", am.ViewAccess(aH.Signoz.Handlers.RawDataExport.ExportRawData)).Methods(http.MethodGet)
|
||||
// router.HandleFunc("/api/v1/export_raw_data", am.ViewAccess(aH.Signoz.Handlers.RawDataExport.ExportRawData)).Methods(http.MethodGet)
|
||||
|
||||
router.HandleFunc("/api/v1/span_percentile", am.ViewAccess(aH.Signoz.Handlers.SpanPercentile.GetSpanPercentileDetails)).Methods(http.MethodPost)
|
||||
|
||||
|
||||
@@ -871,7 +871,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
|
||||
// 1. either the user meant `key` (this is already handled above in fieldKeysForName)
// 2. or the user meant `attribute.key`, so we look in the map for all possible field keys with the name 'attribute.key'

// Note:
|
||||
// If user only wants to search `attribute.key`, then they have to use `attribute.attribute.key`
|
||||
// If user only wants to search `key`, then they have to use `key`
|
||||
// If user wants to search both, they can use `attribute.key` and we will resolve the ambiguity
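// For example (illustrative), assuming both an attribute named `key` and one named `attribute.key` exist:
//   `key = 'x'`                     matches only the attribute named `key`
//   `attribute.attribute.key = 'x'` matches only the attribute named `attribute.key`
//   `attribute.key = 'x'`           is ambiguous and is resolved against both candidates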
|
||||
|
||||
@@ -1,10 +1,7 @@
|
||||
package signoz
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"os"
|
||||
"reflect"
|
||||
|
||||
@@ -23,15 +20,13 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/preference"
|
||||
"github.com/SigNoz/signoz/pkg/modules/promote"
|
||||
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
|
||||
"github.com/SigNoz/signoz/pkg/modules/session"
|
||||
"github.com/SigNoz/signoz/pkg/modules/user"
|
||||
"github.com/SigNoz/signoz/pkg/querier"
|
||||
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
|
||||
"github.com/gorilla/mux"
|
||||
"github.com/swaggest/jsonschema-go"
|
||||
"github.com/swaggest/openapi-go"
|
||||
"github.com/swaggest/openapi-go/openapi3"
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
type OpenAPI struct {
|
||||
@@ -58,15 +53,12 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
|
||||
struct{ gateway.Handler }{},
|
||||
struct{ fields.Handler }{},
|
||||
struct{ authz.Handler }{},
|
||||
struct{ rawdataexport.Handler }{},
|
||||
).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Register routes that live outside the APIServer modules
|
||||
// so they are discovered by the OpenAPI walker.
|
||||
registerQueryRoutes(apiserver.Router())
|
||||
|
||||
reflector := openapi3.NewReflector()
|
||||
reflector.JSONSchemaReflector().DefaultOptions = append(reflector.JSONSchemaReflector().DefaultOptions, jsonschema.InterceptDefName(func(t reflect.Type, defaultDefName string) string {
|
||||
if defaultDefName == "RenderSuccessResponse" {
|
||||
@@ -100,67 +92,10 @@ func (openapi *OpenAPI) CreateAndWrite(path string) error {
|
||||
return err
|
||||
}
|
||||
|
||||
// The library's MarshalYAML does a JSON round-trip that converts all numbers
|
||||
// to float64, causing large integers (e.g. epoch millisecond timestamps) to
|
||||
// render in scientific notation (1.6409952e+12).
|
||||
jsonData, err := openapi.reflector.Spec.MarshalJSON()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
dec := json.NewDecoder(bytes.NewReader(jsonData))
|
||||
dec.UseNumber()
|
||||
|
||||
var v any
|
||||
if err := dec.Decode(&v); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
convertJSONNumbers(v)
|
||||
|
||||
spec, err := yaml.Marshal(v)
|
||||
spec, err := openapi.reflector.Spec.MarshalYAML()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return os.WriteFile(path, spec, 0o600)
|
||||
}
|
||||
|
||||
// convertJSONNumbers recursively walks a decoded JSON structure and converts
|
||||
// json.Number values to int64 (preferred) or float64 so that YAML marshaling
|
||||
// renders them as plain numbers instead of quoted strings.
|
||||
func convertJSONNumbers(v interface{}) {
|
||||
switch val := v.(type) {
|
||||
case map[string]interface{}:
|
||||
for k, elem := range val {
|
||||
if n, ok := elem.(json.Number); ok {
|
||||
if i, err := n.Int64(); err == nil {
|
||||
val[k] = i
|
||||
} else if f, err := n.Float64(); err == nil {
|
||||
val[k] = f
|
||||
}
|
||||
} else {
|
||||
convertJSONNumbers(elem)
|
||||
}
|
||||
}
|
||||
case []interface{}:
|
||||
for i, elem := range val {
|
||||
if n, ok := elem.(json.Number); ok {
|
||||
if i64, err := n.Int64(); err == nil {
|
||||
val[i] = i64
|
||||
} else if f, err := n.Float64(); err == nil {
|
||||
val[i] = f
|
||||
}
|
||||
} else {
|
||||
convertJSONNumbers(elem)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
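
As a quick illustration (not part of this diff) of the behaviour the removed round-trip guarded against, assuming the same json and yaml packages imported above plus fmt and strings:

var v any
_ = json.Unmarshal([]byte(`{"start": 1640995200000}`), &v) // default decoding yields float64
out, _ := yaml.Marshal(v)
fmt.Print(string(out)) // start: 1.6409952e+12

dec := json.NewDecoder(strings.NewReader(`{"start": 1640995200000}`))
dec.UseNumber() // keep numbers as json.Number
_ = dec.Decode(&v)
convertJSONNumbers(v) // json.Number -> int64
out, _ = yaml.Marshal(v)
fmt.Print(string(out)) // start: 1640995200000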
|
||||
|
||||
func registerQueryRoutes(router *mux.Router) {
|
||||
router.Handle("/api/v5/query_range", handler.New(
|
||||
func(http.ResponseWriter, *http.Request) {},
|
||||
querier.QueryRangeV5OpenAPIDef,
|
||||
)).Methods(http.MethodPost)
|
||||
}
|
||||
|
||||
@@ -252,6 +252,7 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
|
||||
handlers.GatewayHandler,
|
||||
handlers.Fields,
|
||||
handlers.AuthzHandler,
|
||||
handlers.RawDataExport,
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -61,11 +61,13 @@ var (
|
||||
}
|
||||
)
|
||||
|
||||
type fieldMapper struct {}
|
||||
type fieldMapper struct {
|
||||
}
|
||||
|
||||
func NewFieldMapper() qbtypes.FieldMapper {
|
||||
return &fieldMapper{}
|
||||
}
|
||||
|
||||
func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error) {
|
||||
switch key.FieldContext {
|
||||
case telemetrytypes.FieldContextResource:
|
||||
@@ -252,27 +254,12 @@ func (m *fieldMapper) buildFieldForJSON(key *telemetrytypes.TelemetryFieldKey) (
|
||||
"plan length is less than 2 for promoted path: %s", key.Name)
|
||||
}
|
||||
|
||||
node := plan[1]
|
||||
promotedExpr := fmt.Sprintf(
|
||||
"dynamicElement(%s, '%s')",
|
||||
node.FieldPath(),
|
||||
node.TerminalConfig.ElemType.StringValue(),
|
||||
// promoted column first then body_json column
|
||||
// TODO(Piyush): Change this in future for better performance
|
||||
expr = fmt.Sprintf("coalesce(%s, %s)",
|
||||
fmt.Sprintf("dynamicElement(%s, '%s')", plan[1].FieldPath(), plan[1].TerminalConfig.ElemType.StringValue()),
|
||||
expr,
|
||||
)
|
||||
|
||||
// dynamicElement returns NULL for scalar types or an empty array for array types.
|
||||
if node.TerminalConfig.ElemType.IsArray {
|
||||
expr = fmt.Sprintf(
|
||||
"if(length(%s) > 0, %s, %s)",
|
||||
promotedExpr,
|
||||
promotedExpr,
|
||||
expr,
|
||||
)
|
||||
} else {
|
||||
// promoted column first then body_json column
|
||||
// TODO(Piyush): Change this in future for better performance
|
||||
expr = fmt.Sprintf("coalesce(%s, %s)", promotedExpr, expr)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return expr, nil
|
||||
@@ -294,7 +281,8 @@ func (m *fieldMapper) buildArrayConcat(plan telemetrytypes.JSONAccessPlan) (stri
|
||||
}
|
||||
|
||||
// Build arrayMap expressions for ALL available branches at the root level.
|
||||
// Iterate branches in deterministic order (JSON then Dynamic)
|
||||
// Iterate branches in deterministic order (JSON then Dynamic) so generated SQL
|
||||
// is stable across environments; map iteration order is random in Go.
|
||||
var arrayMapExpressions []string
|
||||
for _, node := range plan {
|
||||
for _, branchType := range node.BranchesInOrder() {
|
||||
|
||||
@@ -32,6 +32,7 @@ func NewJSONConditionBuilder(key *telemetrytypes.TelemetryFieldKey, valueType te
|
||||
|
||||
// BuildCondition builds the full WHERE condition for body_json JSON paths
|
||||
func (c *jsonConditionBuilder) buildJSONCondition(operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
|
||||
|
||||
conditions := []string{}
|
||||
for _, node := range c.key.JSONPlan {
|
||||
condition, err := c.emitPlannedCondition(node, operator, value, sb)
|
||||
@@ -72,9 +73,9 @@ func (c *jsonConditionBuilder) buildTerminalCondition(node *telemetrytypes.JSONA
|
||||
|
||||
// switch operator for array membership checks
|
||||
switch operator {
|
||||
case qbtypes.FilterOperatorContains:
|
||||
case qbtypes.FilterOperatorContains, qbtypes.FilterOperatorIn:
|
||||
operator = qbtypes.FilterOperatorEqual
|
||||
case qbtypes.FilterOperatorNotContains:
|
||||
case qbtypes.FilterOperatorNotContains, qbtypes.FilterOperatorNotIn:
|
||||
operator = qbtypes.FilterOperatorNotEqual
|
||||
}
|
||||
}
|
||||
@@ -190,14 +191,13 @@ func (c *jsonConditionBuilder) buildArrayMembershipCondition(node *telemetrytype
|
||||
arrayExpr = typedArrayExpr()
|
||||
}
|
||||
|
||||
key := "x"
|
||||
fieldExpr, value := querybuilder.DataTypeCollisionHandledFieldName(&localKeyCopy, value, key, operator)
|
||||
fieldExpr, value := querybuilder.DataTypeCollisionHandledFieldName(&localKeyCopy, value, "x", operator)
|
||||
op, err := c.applyOperator(sb, fieldExpr, operator, value)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return fmt.Sprintf("arrayExists(%s -> %s, %s)", key, op, arrayExpr), nil
|
||||
return fmt.Sprintf("arrayExists(%s -> %s, %s)", fieldExpr, op, arrayExpr), nil
|
||||
}
|
||||
|
||||
// recurseArrayHops recursively builds array traversal conditions
|
||||
@@ -279,31 +279,27 @@ func (c *jsonConditionBuilder) applyOperator(sb *sqlbuilder.SelectBuilder, field
|
||||
case qbtypes.FilterOperatorNotContains:
|
||||
return sb.NotILike(fieldExpr, fmt.Sprintf("%%%v%%", value)), nil
|
||||
case qbtypes.FilterOperatorIn, qbtypes.FilterOperatorNotIn:
|
||||
// emulate IN/NOT IN using OR/AND over equals to leverage indexes consistently
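// e.g. (illustrative) field IN ('a', 'b')      becomes (field = 'a' OR field = 'b')
//      field NOT IN ('a', 'b')                  becomes (field <> 'a' AND field <> 'b')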
|
||||
values, ok := value.([]any)
|
||||
if !ok {
|
||||
values = []any{value}
|
||||
}
|
||||
if operator == qbtypes.FilterOperatorIn {
|
||||
return sb.In(fieldExpr, values...), nil
|
||||
conds := []string{}
|
||||
for _, v := range values {
|
||||
if operator == qbtypes.FilterOperatorIn {
|
||||
conds = append(conds, sb.E(fieldExpr, v))
|
||||
} else {
|
||||
conds = append(conds, sb.NE(fieldExpr, v))
|
||||
}
|
||||
}
|
||||
return sb.NotIn(fieldExpr, values...), nil
|
||||
if operator == qbtypes.FilterOperatorIn {
|
||||
return sb.Or(conds...), nil
|
||||
}
|
||||
return sb.And(conds...), nil
|
||||
case qbtypes.FilterOperatorExists:
|
||||
return fmt.Sprintf("%s IS NOT NULL", fieldExpr), nil
|
||||
case qbtypes.FilterOperatorNotExists:
|
||||
return fmt.Sprintf("%s IS NULL", fieldExpr), nil
|
||||
// between and not between
|
||||
case qbtypes.FilterOperatorBetween, qbtypes.FilterOperatorNotBetween:
|
||||
values, ok := value.([]any)
|
||||
if !ok {
|
||||
return "", qbtypes.ErrBetweenValues
|
||||
}
|
||||
if len(values) != 2 {
|
||||
return "", qbtypes.ErrBetweenValues
|
||||
}
|
||||
if operator == qbtypes.FilterOperatorBetween {
|
||||
return sb.Between(fieldExpr, values[0], values[1]), nil
|
||||
}
|
||||
return sb.NotBetween(fieldExpr, values[0], values[1]), nil
|
||||
default:
|
||||
return "", qbtypes.ErrUnsupportedOperator
|
||||
}
|
||||
|
||||
@@ -316,7 +316,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
|
||||
Limit: 10,
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toFloat64(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{uint64(1747945619), uint64(1747983448), "%1.65%", 1.65, "%1.65%", 1.65, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
||||
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,jsondatatype=Array(Dynamic)]."},
|
||||
},
|
||||
@@ -345,7 +345,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
|
||||
Limit: 10,
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{uint64(1747945619), uint64(1747983448), "%true%", true, "%true%", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
||||
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,jsondatatype=Array(Dynamic)]."},
|
||||
},
|
||||
@@ -360,55 +360,12 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
|
||||
Limit: 10,
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toString(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{uint64(1747945619), uint64(1747983448), "%passed%", "passed", "%passed%", "passed", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
||||
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,jsondatatype=Array(Dynamic)]."},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "Dynamic array IN Operator",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Filter: &qbtypes.Filter{Expression: "body.education[].parameters IN [1.65, 1.99]"},
|
||||
Limit: 10,
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> toFloat64(x) IN (?, ?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> x IN (?, ?), arrayMap(x->dynamicElement(x, 'Array(Nullable(Float64))'), arrayFilter(x->(dynamicType(x) = 'Array(Nullable(Float64))'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{uint64(1747945619), uint64(1747983448), 1.65, 1.99, 1.65, 1.99, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
||||
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,jsondatatype=Array(Dynamic)]."},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "Integer BETWEEN Operator",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Filter: &qbtypes.Filter{Expression: "education[].awards[].semester BETWEEN 2 AND 4"},
|
||||
Limit: 10,
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (arrayExists(`body_json.education`-> (arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) BETWEEN ? AND ?, dynamicElement(`body_json.education`.`awards`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')) OR arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) BETWEEN ? AND ?, arrayMap(x->dynamicElement(x, 'JSON'), arrayFilter(x->(dynamicType(x) = 'JSON'), dynamicElement(`body_json.education`.`awards`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (arrayExists(`body_json.education`-> (arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) BETWEEN ? AND ?, dynamicElement(`body_json.education`.`awards`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')) OR arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) BETWEEN ? AND ?, arrayMap(x->dynamicElement(x, 'JSON'), arrayFilter(x->(dynamicType(x) = 'JSON'), dynamicElement(`body_json.education`.`awards`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{float64(2), float64(4), float64(2), float64(4), uint64(1747945619), uint64(1747983448), float64(2), float64(4), float64(2), float64(4), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "Integer IN Operator",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Filter: &qbtypes.Filter{Expression: "education[].awards[].semester IN [2, 4]"},
|
||||
Limit: 10,
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (arrayExists(`body_json.education`-> (arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) IN (?, ?), dynamicElement(`body_json.education`.`awards`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')) OR arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) IN (?, ?), arrayMap(x->dynamicElement(x, 'JSON'), arrayFilter(x->(dynamicType(x) = 'JSON'), dynamicElement(`body_json.education`.`awards`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (arrayExists(`body_json.education`-> (arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) IN (?, ?), dynamicElement(`body_json.education`.`awards`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')) OR arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) IN (?, ?), arrayMap(x->dynamicElement(x, 'JSON'), arrayFilter(x->(dynamicType(x) = 'JSON'), dynamicElement(`body_json.education`.`awards`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{float64(2), float64(4), float64(2), float64(4), uint64(1747945619), uint64(1747983448), float64(2), float64(4), float64(2), float64(4), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "Equals to 'sports' inside array of awards",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
@@ -432,7 +389,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
|
||||
Limit: 10,
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))')) OR arrayExists(x -> toFloat64(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))')) OR arrayExists(x -> toFloat64OrNull(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? 
AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))')) OR arrayExists(toFloat64OrNull(x) -> toFloat64OrNull(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? 
AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%4%", float64(4), "%4%", float64(4), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `interests[].entities[].reviews[].entries[].metadata[].positions[].ratings` is ambiguous, found 2 different combinations of field context / data type: [name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]int64,jsondatatype=Array(Nullable(Int64)) name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]string,jsondatatype=Array(Nullable(String))]."},
},
@@ -447,7 +404,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))')) OR arrayExists(x -> toString(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? 
AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%Good%", "Good", "%Good%", "Good", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `interests[].entities[].reviews[].entries[].metadata[].positions[].ratings` is ambiguous, found 2 different combinations of field context / data type: [name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]int64,jsondatatype=Array(Nullable(Int64)) name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]string,jsondatatype=Array(Nullable(String))]."},
},
@@ -535,7 +492,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
disableBodyJSONQuery(t)
}()
statementBuilder := buildJSONTestStatementBuilder(t, "education", "tags")
statementBuilder := buildJSONTestStatementBuilder(t, "education")
cases := []struct {
name string
requestType qbtypes.RequestType
@@ -543,20 +500,6 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
expected qbtypes.Statement
expectedErr error
}{
{
name: "Has Array promoted uses body fallback",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{Expression: "has(body.tags, 'production')"},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND has(if(length(dynamicElement(body_json_promoted.`tags`, 'Array(Nullable(String))')) > 0, dynamicElement(body_json_promoted.`tags`, 'Array(Nullable(String))'), dynamicElement(body_json.`tags`, 'Array(Nullable(String))')), ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "production", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
},
{
name: "Key inside Array(JSON) exists",
requestType: qbtypes.RequestTypeRaw,
@@ -608,7 +551,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toFloat64(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toFloat64(x) = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%1.65%", 1.65, "%1.65%", 1.65, "%1.65%", 1.65, "%1.65%", 1.65, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,materialized=true,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,materialized=true,jsondatatype=Array(Dynamic)]."},
},
@@ -637,7 +580,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%true%", true, "%true%", true, "%true%", true, "%true%", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,materialized=true,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,materialized=true,jsondatatype=Array(Dynamic)]."},
},
@@ -652,7 +595,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toString(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toString(x) = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%passed%", "passed", "%passed%", "passed", "%passed%", "passed", "%passed%", "passed", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,materialized=true,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,materialized=true,jsondatatype=Array(Dynamic)]."},
},
@@ -3,12 +3,12 @@ package telemetrylogs
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
func parseStrValue(valueStr string, operator qbtypes.FilterOperator) (telemetrytypes.FieldDataType, any) {
|
||||
@@ -41,55 +41,31 @@ func parseStrValue(valueStr string, operator qbtypes.FilterOperator) (telemetryt
|
||||
}
|
||||
|
||||
func InferDataType(value any, operator qbtypes.FilterOperator, key *telemetrytypes.TelemetryFieldKey) (telemetrytypes.FieldDataType, any) {
|
||||
if operator.IsArrayOperator() && reflect.ValueOf(value).Kind() != reflect.Slice {
|
||||
value = []any{value}
|
||||
}
|
||||
|
||||
// closure to calculate the data type of the value
|
||||
var closure func(value any, key *telemetrytypes.TelemetryFieldKey) (telemetrytypes.FieldDataType, any)
|
||||
closure = func(value any, key *telemetrytypes.TelemetryFieldKey) (telemetrytypes.FieldDataType, any) {
|
||||
// check if the value is an int, float, string, or bool
|
||||
valueType := telemetrytypes.FieldDataTypeUnspecified
|
||||
switch v := value.(type) {
|
||||
case []any:
|
||||
// take the first element and infer the type
|
||||
var scalerType telemetrytypes.FieldDataType
|
||||
if len(v) > 0 {
|
||||
// Note: [[...]] Slices inside Slices are not handled yet
|
||||
if reflect.ValueOf(v[0]).Kind() == reflect.Slice {
|
||||
return telemetrytypes.FieldDataTypeUnspecified, value
|
||||
}
|
||||
|
||||
scalerType, _ = closure(v[0], key)
|
||||
}
|
||||
|
||||
arrayType := telemetrytypes.ScalerFieldTypeToArrayFieldType[scalerType]
|
||||
switch {
|
||||
// decide on the field data type based on the key
|
||||
case key.FieldDataType.IsArray():
|
||||
return arrayType, v
|
||||
default:
|
||||
// TODO(Piyush): backward compatibility for the old String based JSON QB queries
|
||||
if strings.HasSuffix(key.Name, telemetrytypes.ArrayAnyIndexSuffix) {
|
||||
return arrayType, v
|
||||
}
|
||||
return scalerType, v
|
||||
}
|
||||
case uint8, uint16, uint32, uint64, int, int8, int16, int32, int64:
|
||||
valueType = telemetrytypes.FieldDataTypeInt64
|
||||
case float32, float64:
|
||||
valueType = telemetrytypes.FieldDataTypeFloat64
|
||||
case string:
|
||||
valueType, value = parseStrValue(v, operator)
|
||||
case bool:
|
||||
valueType = telemetrytypes.FieldDataTypeBool
|
||||
// check if the value is an int, float, string, or bool
|
||||
valueType := telemetrytypes.FieldDataTypeUnspecified
|
||||
switch v := value.(type) {
|
||||
case []any:
|
||||
// take the first element and infer the type
|
||||
if len(v) > 0 {
|
||||
valueType, _ = InferDataType(v[0], operator, key)
|
||||
}
|
||||
|
||||
return valueType, value
|
||||
return valueType, v
|
||||
case uint8, uint16, uint32, uint64, int, int8, int16, int32, int64:
|
||||
valueType = telemetrytypes.FieldDataTypeInt64
|
||||
case float32, float64:
|
||||
valueType = telemetrytypes.FieldDataTypeFloat64
|
||||
case string:
|
||||
valueType, value = parseStrValue(v, operator)
|
||||
case bool:
|
||||
valueType = telemetrytypes.FieldDataTypeBool
|
||||
}
|
||||
|
||||
// calculate the data type of the value
|
||||
return closure(value, key)
|
||||
// check if it is array
|
||||
if strings.HasSuffix(key.Name, "[*]") || strings.HasSuffix(key.Name, "[]") {
|
||||
valueType = telemetrytypes.FieldDataType{String: valuer.NewString(fmt.Sprintf("[]%s", valueType.StringValue()))}
|
||||
}
|
||||
|
||||
return valueType, value
|
||||
}
|
||||
|
||||
func getBodyJSONPath(key *telemetrytypes.TelemetryFieldKey) string {
|
||||
|
||||
@@ -421,38 +421,6 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "IN operator with json search",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "body.user_names[*] IN 'john_doe'",
|
||||
},
|
||||
Limit: 10,
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((JSONExtract(JSON_QUERY(body, '$.\"user_names\"[*]'), 'Array(String)') = ?) AND JSON_EXISTS(body, '$.\"user_names\"[*]')) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{uint64(1747945619), uint64(1747983448), "john_doe", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "has with json search",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "has(body.user_names[*], 'john_doe')",
|
||||
},
|
||||
Limit: 10,
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND has(JSONExtract(JSON_QUERY(body, '$.\"user_names\"[*]'), 'Array(String)'), ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{uint64(1747945619), uint64(1747983448), "john_doe", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
}
|
||||
|
||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||
|
||||
@@ -112,8 +112,7 @@ func (t *telemetryMetaStore) buildBodyJSONPaths(ctx context.Context,
|
||||
}
|
||||
|
||||
for _, fieldKey := range fieldKeys {
|
||||
promotedKey := strings.Split(fieldKey.Name, telemetrytypes.ArraySep)[0]
|
||||
fieldKey.Materialized = promoted.Contains(promotedKey)
|
||||
fieldKey.Materialized = promoted.Contains(fieldKey.Name)
|
||||
fieldKey.Indexes = indexes[fieldKey.Name]
|
||||
}
|
||||
|
||||
@@ -502,8 +501,7 @@ func (t *telemetryMetaStore) GetPromotedPaths(ctx context.Context, paths ...stri
|
||||
sb := sqlbuilder.Select("path").From(fmt.Sprintf("%s.%s", DBName, PromotedPathsTableName))
|
||||
pathConditions := []string{}
|
||||
for _, path := range paths {
|
||||
split := strings.Split(path, telemetrytypes.ArraySep)
|
||||
pathConditions = append(pathConditions, sb.Equal("path", split[0]))
|
||||
pathConditions = append(pathConditions, sb.Equal("path", path))
|
||||
}
|
||||
sb.Where(sb.Or(pathConditions...))
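For reference, a minimal sketch (not part of the diff) of the filter shape this loop builds, assuming the usual huandu/go-sqlbuilder semantics; the database and table names below are placeholders standing in for DBName and PromotedPathsTableName.

	// Illustrative only: look up two hypothetical promoted paths.
	sb := sqlbuilder.Select("path").From("signoz_metadata.promoted_paths") // placeholder table name
	conds := []string{
		sb.Equal("path", "education"),
		sb.Equal("path", "interests"),
	}
	sb.Where(sb.Or(conds...))
	sql, args := sb.Build()
	// sql is roughly: SELECT path FROM signoz_metadata.promoted_paths WHERE (path = ? OR path = ?)
	// args holds []interface{}{"education", "interests"}
	fmt.Println(sql, args)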
|
||||
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
package telemetrytraces
|
||||
|
||||
import "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
import (
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
)
|
||||
|
||||
var (
|
||||
IntrinsicFields = map[string]telemetrytypes.TelemetryFieldKey{
|
||||
@@ -379,4 +382,19 @@ var (
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
}
|
||||
|
||||
DefaultTracesSortingOrder = []qbtypes.OrderBy{
|
||||
{
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: DefaultFields["timestamp"],
|
||||
},
|
||||
Direction: qbtypes.OrderDirectionDesc,
|
||||
},
|
||||
{
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: DefaultFields["span_id"],
|
||||
},
|
||||
Direction: qbtypes.OrderDirectionDesc,
|
||||
},
|
||||
}
|
||||
)
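A hedged sketch (not part of the diff) of how a caller could fall back to this default ordering when a trace builder query specifies none; the query variable and its Order field name are assumptions based on the builder query shape used elsewhere in this diff.

	// Hypothetical: apply the default trace ordering only when none was requested.
	if len(query.Order) == 0 {
		query.Order = append(query.Order, DefaultTracesSortingOrder...)
	}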
|
||||
|
||||
@@ -172,6 +172,7 @@ var (
|
||||
|
||||
func (TimeAggregation) Enum() []any {
|
||||
return []any{
|
||||
TimeAggregationUnspecified,
|
||||
TimeAggregationLatest,
|
||||
TimeAggregationSum,
|
||||
TimeAggregationAvg,
|
||||
@@ -204,6 +205,7 @@ var (
|
||||
|
||||
func (SpaceAggregation) Enum() []any {
|
||||
return []any{
|
||||
SpaceAggregationUnspecified,
|
||||
SpaceAggregationSum,
|
||||
SpaceAggregationAvg,
|
||||
SpaceAggregationMin,
|
||||
|
||||
@@ -10,35 +10,10 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/types/metrictypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/swaggest/jsonschema-go"
|
||||
)
|
||||
|
||||
type Step struct{ time.Duration }
|
||||
|
||||
var _ jsonschema.Exposer = Step{}
|
||||
|
||||
// JSONSchema returns a custom schema for Step that accepts either a duration string or a number (seconds).
|
||||
func (Step) JSONSchema() (jsonschema.Schema, error) {
|
||||
s := jsonschema.Schema{}
|
||||
s.WithDescription("Step interval. Accepts a Go duration string (e.g., \"60s\", \"1m\", \"1h\") or a number representing seconds (e.g., 60).")
|
||||
|
||||
strSchema := jsonschema.Schema{}
|
||||
strSchema.WithType(jsonschema.String.Type())
|
||||
strSchema.WithExamples("60s", "5m", "1h")
|
||||
strSchema.WithDescription("Duration string (e.g., \"60s\", \"5m\", \"1h\").")
|
||||
|
||||
numSchema := jsonschema.Schema{}
|
||||
numSchema.WithType(jsonschema.Number.Type())
|
||||
numSchema.WithExamples(60, 300, 3600)
|
||||
numSchema.WithDescription("Duration in seconds.")
|
||||
|
||||
s.OneOf = []jsonschema.SchemaOrBool{
|
||||
strSchema.ToSchemaOrBool(),
|
||||
numSchema.ToSchemaOrBool(),
|
||||
}
|
||||
return s, nil
|
||||
}
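An illustrative sketch (not part of the diff) of the two request shapes the description above refers to; it assumes encoding/json is imported and that Step.UnmarshalJSON (shown next) accepts both encodings.

	// Hypothetical payloads: a Go duration string and a plain number of seconds.
	var withString, withNumber struct {
		Step Step `json:"step"`
	}
	_ = json.Unmarshal([]byte(`{"step": "60s"}`), &withString) // "60s", "5m", "1h", ...
	_ = json.Unmarshal([]byte(`{"step": 60}`), &withNumber)    // 60 -> sixty seconds
	// Both are expected to decode to a one-minute step.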
|
||||
|
||||
func (s *Step) UnmarshalJSON(b []byte) error {
|
||||
if len(b) == 0 {
|
||||
return nil
|
||||
@@ -186,17 +161,6 @@ func (f FilterOperator) IsStringSearchOperator() bool {
|
||||
}
|
||||
}
|
||||
|
||||
// IsArrayOperator returns true if the operator works with array values only
|
||||
func (f FilterOperator) IsArrayOperator() bool {
|
||||
switch f {
|
||||
case FilterOperatorIn, FilterOperatorNotIn,
|
||||
FilterOperatorBetween, FilterOperatorNotBetween:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
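A one-line illustration (not part of the diff) of which operators this helper classifies as array-valued.

	// IN / NOT IN and BETWEEN / NOT BETWEEN expect multi-value operands.
	_ = FilterOperatorIn.IsArrayOperator()      // true
	_ = FilterOperatorBetween.IsArrayOperator() // true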
|
||||
|
||||
type OrderDirection struct {
|
||||
valuer.String
|
||||
}
|
||||
@@ -206,14 +170,6 @@ var (
|
||||
OrderDirectionDesc = OrderDirection{valuer.NewString("desc")}
|
||||
)
|
||||
|
||||
// Enum returns the acceptable values for OrderDirection.
|
||||
func (OrderDirection) Enum() []any {
|
||||
return []any{
|
||||
OrderDirectionAsc,
|
||||
OrderDirectionDesc,
|
||||
}
|
||||
}
|
||||
|
||||
var (
|
||||
OrderDirectionMap = map[string]OrderDirection{
|
||||
"asc": OrderDirectionAsc,
|
||||
@@ -236,19 +192,6 @@ var (
|
||||
ReduceToMedian = ReduceTo{valuer.NewString("median")}
|
||||
)
|
||||
|
||||
// Enum returns the acceptable values for ReduceTo.
|
||||
func (ReduceTo) Enum() []any {
|
||||
return []any{
|
||||
ReduceToSum,
|
||||
ReduceToCount,
|
||||
ReduceToAvg,
|
||||
ReduceToMin,
|
||||
ReduceToMax,
|
||||
ReduceToLast,
|
||||
ReduceToMedian,
|
||||
}
|
||||
}
|
||||
|
||||
// FunctionReduceTo applies the reduceTo operator to a time series and returns a new series with the reduced value
|
||||
// reduceTo can be one of: last, sum, avg, min, max, count, median
|
||||
// if reduceTo is not recognized, the function returns the original series
|
||||
|
||||
@@ -36,30 +36,6 @@ var (
|
||||
FunctionNameFillZero = FunctionName{valuer.NewString("fillZero")}
|
||||
)
|
||||
|
||||
// Enum returns the acceptable values for FunctionName.
|
||||
func (FunctionName) Enum() []any {
|
||||
return []any{
|
||||
FunctionNameCutOffMin,
|
||||
FunctionNameCutOffMax,
|
||||
FunctionNameClampMin,
|
||||
FunctionNameClampMax,
|
||||
FunctionNameAbsolute,
|
||||
FunctionNameRunningDiff,
|
||||
FunctionNameLog2,
|
||||
FunctionNameLog10,
|
||||
FunctionNameCumulativeSum,
|
||||
FunctionNameEWMA3,
|
||||
FunctionNameEWMA5,
|
||||
FunctionNameEWMA7,
|
||||
FunctionNameMedian3,
|
||||
FunctionNameMedian5,
|
||||
FunctionNameMedian7,
|
||||
FunctionNameTimeShift,
|
||||
FunctionNameAnomaly,
|
||||
FunctionNameFillZero,
|
||||
}
|
||||
}
|
||||
|
||||
// Validate checks if the FunctionName is valid and one of the known types
|
||||
func (fn FunctionName) Validate() error {
|
||||
validFunctions := []FunctionName{
|
||||
|
||||
@@ -16,17 +16,6 @@ var (
|
||||
JoinTypeCross = JoinType{valuer.NewString("cross")}
|
||||
)
|
||||
|
||||
// Enum returns the acceptable values for JoinType.
|
||||
func (JoinType) Enum() []any {
|
||||
return []any{
|
||||
JoinTypeInner,
|
||||
JoinTypeLeft,
|
||||
JoinTypeRight,
|
||||
JoinTypeFull,
|
||||
JoinTypeCross,
|
||||
}
|
||||
}
|
||||
|
||||
type QueryRef struct {
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
@@ -2,8 +2,6 @@ package querybuildertypesv5
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/swaggest/jsonschema-go"
|
||||
)
|
||||
|
||||
type Query interface {
|
||||
@@ -31,12 +29,4 @@ type ExecStats struct {
|
||||
StepIntervals map[string]uint64 `json:"stepIntervals,omitempty"`
|
||||
}
|
||||
|
||||
var _ jsonschema.Preparer = &ExecStats{}
|
||||
|
||||
// PrepareJSONSchema adds description to the ExecStats schema.
|
||||
func (e *ExecStats) PrepareJSONSchema(schema *jsonschema.Schema) error {
|
||||
schema.WithDescription("Execution statistics for the query, including rows scanned, bytes scanned, and duration.")
|
||||
return nil
|
||||
}
|
||||
|
||||
type TimeRange struct{ From, To uint64 } // ms since epoch
|
||||
|
||||
@@ -16,17 +16,3 @@ var (
|
||||
QueryTypeClickHouseSQL = QueryType{valuer.NewString("clickhouse_sql")}
|
||||
QueryTypePromQL = QueryType{valuer.NewString("promql")}
|
||||
)
|
||||
|
||||
// Enum returns the acceptable values for QueryType.
|
||||
func (QueryType) Enum() []any {
|
||||
return []any{
|
||||
QueryTypeBuilder,
|
||||
QueryTypeFormula,
|
||||
// Not yet supported.
|
||||
// QueryTypeSubQuery,
|
||||
// QueryTypeJoin,
|
||||
QueryTypeTraceOperator,
|
||||
QueryTypeClickHouseSQL,
|
||||
QueryTypePromQL,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,6 +2,8 @@ package querybuildertypesv5
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"maps"
|
||||
"reflect"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/govaluate"
|
||||
@@ -9,7 +11,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/types/metrictypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/swaggest/jsonschema-go"
|
||||
)
|
||||
|
||||
type QueryEnvelope struct {
|
||||
@@ -19,71 +20,6 @@ type QueryEnvelope struct {
|
||||
Spec any `json:"spec"`
|
||||
}
|
||||
|
||||
// queryEnvelopeBuilderTrace is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=traces.
|
||||
type queryEnvelopeBuilderTrace struct {
|
||||
Type QueryType `json:"type" description:"The type of the query."`
|
||||
Spec QueryBuilderQuery[TraceAggregation] `json:"spec" description:"The trace builder query specification."`
|
||||
}
|
||||
|
||||
// queryEnvelopeBuilderLog is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=logs.
|
||||
type queryEnvelopeBuilderLog struct {
|
||||
Type QueryType `json:"type" description:"The type of the query."`
|
||||
Spec QueryBuilderQuery[LogAggregation] `json:"spec" description:"The log builder query specification."`
|
||||
}
|
||||
|
||||
// queryEnvelopeBuilderMetric is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=metrics.
|
||||
type queryEnvelopeBuilderMetric struct {
|
||||
Type QueryType `json:"type" description:"The type of the query."`
|
||||
Spec QueryBuilderQuery[MetricAggregation] `json:"spec" description:"The metric builder query specification."`
|
||||
}
|
||||
|
||||
// queryEnvelopeFormula is the OpenAPI schema for a QueryEnvelope with type=builder_formula.
|
||||
type queryEnvelopeFormula struct {
|
||||
Type QueryType `json:"type" description:"The type of the query."`
|
||||
Spec QueryBuilderFormula `json:"spec" description:"The formula specification."`
|
||||
}
|
||||
|
||||
// queryEnvelopeJoin is the OpenAPI schema for a QueryEnvelope with type=builder_join.
|
||||
// type queryEnvelopeJoin struct {
|
||||
// Type QueryType `json:"type" description:"The type of the query."`
|
||||
// Spec QueryBuilderJoin `json:"spec" description:"The join specification."`
|
||||
// }
|
||||
|
||||
// queryEnvelopeTraceOperator is the OpenAPI schema for a QueryEnvelope with type=builder_trace_operator.
|
||||
type queryEnvelopeTraceOperator struct {
|
||||
Type QueryType `json:"type" description:"The type of the query."`
|
||||
Spec QueryBuilderTraceOperator `json:"spec" description:"The trace operator specification."`
|
||||
}
|
||||
|
||||
// queryEnvelopePromQL is the OpenAPI schema for a QueryEnvelope with type=promql.
|
||||
type queryEnvelopePromQL struct {
|
||||
Type QueryType `json:"type" description:"The type of the query."`
|
||||
Spec PromQuery `json:"spec" description:"The PromQL query specification."`
|
||||
}
|
||||
|
||||
// queryEnvelopeClickHouseSQL is the OpenAPI schema for a QueryEnvelope with type=clickhouse_sql.
|
||||
type queryEnvelopeClickHouseSQL struct {
|
||||
Type QueryType `json:"type" description:"The type of the query."`
|
||||
Spec ClickHouseQuery `json:"spec" description:"The ClickHouse SQL query specification."`
|
||||
}
|
||||
|
||||
var _ jsonschema.OneOfExposer = QueryEnvelope{}
|
||||
|
||||
// JSONSchemaOneOf returns the oneOf variants for the QueryEnvelope discriminated union.
|
||||
// Each variant represents a different query type with its corresponding spec schema.
|
||||
func (QueryEnvelope) JSONSchemaOneOf() []any {
|
||||
return []any{
|
||||
queryEnvelopeBuilderTrace{},
|
||||
queryEnvelopeBuilderLog{},
|
||||
queryEnvelopeBuilderMetric{},
|
||||
queryEnvelopeFormula{},
|
||||
// queryEnvelopeJoin{},
|
||||
queryEnvelopeTraceOperator{},
|
||||
queryEnvelopePromQL{},
|
||||
queryEnvelopeClickHouseSQL{},
|
||||
}
|
||||
}
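A minimal sketch (not part of the diff) tying the envelope's custom unmarshaling to the typed getters added in req_getters.go further down; the "clickhouse_sql" type string comes from QueryTypeClickHouseSQL, while the spec field names ("name", "query") are assumptions for illustration.

	// Hypothetical request fragment for a ClickHouse SQL query.
	raw := []byte(`{"type": "clickhouse_sql", "spec": {"name": "A", "query": "SELECT 1"}}`)

	var env QueryEnvelope
	if err := json.Unmarshal(raw, &env); err != nil {
		// handle the error
	}
	// env.Spec is expected to hold a ClickHouseQuery, so the typed getters apply:
	_ = env.GetQueryName() // "A"
	_ = env.GetQuery()     // "SELECT 1"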
|
||||
|
||||
// implement custom json unmarshaler for the QueryEnvelope
|
||||
func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
|
||||
var shadow struct {
|
||||
@@ -196,12 +132,6 @@ type CompositeQuery struct {
|
||||
Queries []QueryEnvelope `json:"queries"`
|
||||
}
|
||||
|
||||
// PrepareJSONSchema adds description to the CompositeQuery schema.
|
||||
func (c *CompositeQuery) PrepareJSONSchema(schema *jsonschema.Schema) error {
|
||||
schema.WithDescription("Composite query containing one or more query envelopes. Each query envelope specifies its type and corresponding spec.")
|
||||
return nil
|
||||
}
|
||||
|
||||
// UnmarshalJSON implements custom JSON unmarshaling to provide better error messages
|
||||
func (c *CompositeQuery) UnmarshalJSON(data []byte) error {
|
||||
type Alias CompositeQuery
|
||||
@@ -264,16 +194,6 @@ var (
|
||||
TextBoxVariableType = VariableType{valuer.NewString("text")}
|
||||
)
|
||||
|
||||
// Enum returns the acceptable values for VariableType.
|
||||
func (VariableType) Enum() []any {
|
||||
return []any{
|
||||
QueryVariableType,
|
||||
DynamicVariableType,
|
||||
CustomVariableType,
|
||||
TextBoxVariableType,
|
||||
}
|
||||
}
|
||||
|
||||
type VariableItem struct {
|
||||
Type VariableType `json:"type"`
|
||||
Value any `json:"value"`
|
||||
@@ -299,12 +219,6 @@ type QueryRangeRequest struct {
|
||||
FormatOptions *FormatOptions `json:"formatOptions,omitempty"`
|
||||
}
|
||||
|
||||
// PrepareJSONSchema adds description to the QueryRangeRequest schema.
|
||||
func (q *QueryRangeRequest) PrepareJSONSchema(schema *jsonschema.Schema) error {
|
||||
schema.WithDescription("Request body for the v5 query range endpoint. Supports builder queries (traces, logs, metrics), formulas, joins, trace operators, PromQL, and ClickHouse SQL queries.")
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *QueryRangeRequest) StepIntervalForQuery(name string) int64 {
|
||||
stepsMap := make(map[string]int64)
|
||||
for _, query := range r.CompositeQuery.Queries {
|
||||
@@ -567,3 +481,72 @@ func (r *QueryRangeRequest) GetQueriesSupportingZeroDefault() map[string]bool {
|
||||
|
||||
return canDefaultZero
|
||||
}
|
||||
|
||||
func (r *QueryRangeRequest) Copy() QueryRangeRequest {
|
||||
c := QueryRangeRequest{
|
||||
SchemaVersion: r.SchemaVersion,
|
||||
Start: r.Start,
|
||||
End: r.End,
|
||||
RequestType: r.RequestType,
|
||||
NoCache: r.NoCache,
|
||||
FormatOptions: r.FormatOptions,
|
||||
Variables: make(map[string]VariableItem),
|
||||
CompositeQuery: CompositeQuery{},
|
||||
}
|
||||
|
||||
maps.Copy(c.Variables, r.Variables)
|
||||
|
||||
c.CompositeQuery.Queries = make([]QueryEnvelope, len(r.CompositeQuery.Queries))
|
||||
for i, q := range r.CompositeQuery.Queries {
|
||||
if q.Spec != nil {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
c.CompositeQuery.Queries[i] = QueryEnvelope{
|
||||
Type: q.Type,
|
||||
Spec: spec.Copy(),
|
||||
}
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
c.CompositeQuery.Queries[i] = QueryEnvelope{
|
||||
Type: q.Type,
|
||||
Spec: spec.Copy(),
|
||||
}
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
c.CompositeQuery.Queries[i] = QueryEnvelope{
|
||||
Type: q.Type,
|
||||
Spec: spec.Copy(),
|
||||
}
|
||||
case QueryBuilderFormula:
|
||||
c.CompositeQuery.Queries[i] = QueryEnvelope{
|
||||
Type: q.Type,
|
||||
Spec: spec.Copy(),
|
||||
}
|
||||
case QueryBuilderJoin:
|
||||
c.CompositeQuery.Queries[i] = QueryEnvelope{
|
||||
Type: q.Type,
|
||||
Spec: spec.Copy(),
|
||||
}
|
||||
case QueryBuilderTraceOperator:
|
||||
c.CompositeQuery.Queries[i] = QueryEnvelope{
|
||||
Type: q.Type,
|
||||
Spec: spec.Copy(),
|
||||
}
|
||||
default:
|
||||
// Check if spec implements a Copy method
|
||||
specValue := reflect.ValueOf(q.Spec)
|
||||
copyMethod := specValue.MethodByName("Copy")
|
||||
if copyMethod.IsValid() && copyMethod.Type().NumIn() == 0 && copyMethod.Type().NumOut() == 1 {
|
||||
// Call Copy method and use the result
|
||||
result := copyMethod.Call(nil)
|
||||
c.CompositeQuery.Queries[i] = QueryEnvelope{
|
||||
Type: q.Type,
|
||||
Spec: result[0].Interface(),
|
||||
}
|
||||
} else {
|
||||
// Fallback to shallow copy
|
||||
c.CompositeQuery.Queries[i] = q
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return c
|
||||
}
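A short sketch (not part of the diff) of why the deep copy matters: mutating the clone's variables or time window should leave the original request untouched.

	// Hypothetical: tweak a copy of the request without affecting the original.
	clone := req.Copy()
	clone.Start = clone.Start - 3600000 // shift the window back one hour (ms since epoch)
	clone.Variables["env"] = VariableItem{Type: CustomVariableType, Value: "staging"}
	// req.Variables and req.CompositeQuery.Queries are unchanged.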
|
||||
|
||||
386
pkg/types/querybuildertypes/querybuildertypesv5/req_getters.go
Normal file
@@ -0,0 +1,386 @@
|
||||
package querybuildertypesv5
|
||||
|
||||
import "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
|
||||
// GetExpression returns the expression string. Panics for Join, PromQuery and ClickHouseQuery.
|
||||
func (q *QueryEnvelope) GetExpression() string {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Expression
|
||||
case QueryBuilderFormula:
|
||||
return spec.Expression
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetReturnSpansFrom returns the return-spans-from value. Panics for all types except TraceOperator.
|
||||
func (q *QueryEnvelope) GetReturnSpansFrom() string {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.ReturnSpansFrom
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetSignal returns the signal. Panics for all types except Query[T].
|
||||
func (q *QueryEnvelope) GetSignal() telemetrytypes.Signal {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Signal
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Signal
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Signal
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetSource returns the source. Panics for all types except Query[T].
|
||||
func (q *QueryEnvelope) GetSource() telemetrytypes.Source {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Source
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Source
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Source
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetQuery returns the raw query string. Panics for all types except PromQuery and ClickHouseQuery.
|
||||
func (q *QueryEnvelope) GetQuery() string {
|
||||
switch spec := q.Spec.(type) {
|
||||
case PromQuery:
|
||||
return spec.Query
|
||||
case ClickHouseQuery:
|
||||
return spec.Query
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetStep returns the PromQL step size. Panics for all types except PromQuery.
|
||||
func (q *QueryEnvelope) GetStep() Step {
|
||||
switch spec := q.Spec.(type) {
|
||||
case PromQuery:
|
||||
return spec.Step
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetStats returns the PromQL stats flag. Panics for all types except PromQuery.
|
||||
func (q *QueryEnvelope) GetStats() bool {
|
||||
switch spec := q.Spec.(type) {
|
||||
case PromQuery:
|
||||
return spec.Stats
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetLeft returns the left query reference of a join. Panics for all types except QueryBuilderJoin.
|
||||
func (q *QueryEnvelope) GetLeft() QueryRef {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderJoin:
|
||||
return spec.Left
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetRight returns the right query reference of a join. Panics for all types except QueryBuilderJoin.
|
||||
func (q *QueryEnvelope) GetRight() QueryRef {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderJoin:
|
||||
return spec.Right
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetJoinType returns the join type. Panics for all types except QueryBuilderJoin.
|
||||
func (q *QueryEnvelope) GetJoinType() JoinType {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderJoin:
|
||||
return spec.Type
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetOn returns the join ON condition. Panics for all types except QueryBuilderJoin.
|
||||
func (q *QueryEnvelope) GetOn() string {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderJoin:
|
||||
return spec.On
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetQueryName returns the name of the spec.
|
||||
func (q *QueryEnvelope) GetQueryName() string {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Name
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Name
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Name
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Name
|
||||
case QueryBuilderFormula:
|
||||
return spec.Name
|
||||
case QueryBuilderJoin:
|
||||
return spec.Name
|
||||
case PromQuery:
|
||||
return spec.Name
|
||||
case ClickHouseQuery:
|
||||
return spec.Name
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetDisabled returns whether the spec is disabled.
|
||||
func (q *QueryEnvelope) GetDisabled() bool {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Disabled
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Disabled
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Disabled
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Disabled
|
||||
case QueryBuilderFormula:
|
||||
return spec.Disabled
|
||||
case QueryBuilderJoin:
|
||||
return spec.Disabled
|
||||
case PromQuery:
|
||||
return spec.Disabled
|
||||
case ClickHouseQuery:
|
||||
return spec.Disabled
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetLimit returns the row limit. Panics for PromQuery and ClickHouseQuery.
|
||||
func (q *QueryEnvelope) GetLimit() int {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Limit
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Limit
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Limit
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Limit
|
||||
case QueryBuilderFormula:
|
||||
return spec.Limit
|
||||
case QueryBuilderJoin:
|
||||
return spec.Limit
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetOffset returns the row offset. Panics for Formula, Join, PromQuery and ClickHouseQuery.
|
||||
func (q *QueryEnvelope) GetOffset() int {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Offset
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Offset
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Offset
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Offset
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetType returns the QueryType of the envelope.
|
||||
func (q *QueryEnvelope) GetType() QueryType {
|
||||
return q.Type
|
||||
}
|
||||
|
||||
// GetOrder returns the order-by clauses. Panics for PromQuery and ClickHouseQuery.
|
||||
func (q *QueryEnvelope) GetOrder() []OrderBy {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Order
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Order
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Order
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Order
|
||||
case QueryBuilderFormula:
|
||||
return spec.Order
|
||||
case QueryBuilderJoin:
|
||||
return spec.Order
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetGroupBy returns the group-by keys. Panics for Formula, PromQuery and ClickHouseQuery.
|
||||
func (q *QueryEnvelope) GetGroupBy() []GroupByKey {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.GroupBy
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.GroupBy
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.GroupBy
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.GroupBy
|
||||
case QueryBuilderJoin:
|
||||
return spec.GroupBy
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetFilter returns the filter. Panics for Formula, PromQuery and ClickHouseQuery.
|
||||
func (q *QueryEnvelope) GetFilter() *Filter {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Filter
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Filter
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Filter
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Filter
|
||||
case QueryBuilderJoin:
|
||||
return spec.Filter
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetHaving returns the having clause. Panics for PromQuery and ClickHouseQuery.
|
||||
func (q *QueryEnvelope) GetHaving() *Having {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Having
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Having
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Having
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Having
|
||||
case QueryBuilderFormula:
|
||||
return spec.Having
|
||||
case QueryBuilderJoin:
|
||||
return spec.Having
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetFunctions returns the post-processing functions. Panics for PromQuery and ClickHouseQuery.
|
||||
func (q *QueryEnvelope) GetFunctions() []Function {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Functions
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Functions
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Functions
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Functions
|
||||
case QueryBuilderFormula:
|
||||
return spec.Functions
|
||||
case QueryBuilderJoin:
|
||||
return spec.Functions
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetSelectFields returns the selected fields. Panics for Formula, PromQuery and ClickHouseQuery.
|
||||
func (q *QueryEnvelope) GetSelectFields() []telemetrytypes.TelemetryFieldKey {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.SelectFields
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.SelectFields
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.SelectFields
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.SelectFields
|
||||
case QueryBuilderJoin:
|
||||
return spec.SelectFields
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetLegend returns the legend label. Panics for QueryBuilderJoin.
|
||||
func (q *QueryEnvelope) GetLegend() string {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Legend
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Legend
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Legend
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Legend
|
||||
case QueryBuilderFormula:
|
||||
return spec.Legend
|
||||
case PromQuery:
|
||||
return spec.Legend
|
||||
case ClickHouseQuery:
|
||||
return spec.Legend
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetCursor returns the pagination cursor. Panics for Formula, Join, PromQuery and ClickHouseQuery.
|
||||
func (q *QueryEnvelope) GetCursor() string {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.Cursor
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.Cursor
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.Cursor
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.Cursor
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetStepInterval returns the step interval. Panics for Formula, Join, PromQuery and ClickHouseQuery.
|
||||
func (q *QueryEnvelope) GetStepInterval() Step {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
return spec.StepInterval
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.StepInterval
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.StepInterval
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.StepInterval
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetSecondaryAggregations returns the secondary aggregations. Panics for TraceOperator, Formula, PromQuery and ClickHouseQuery.
|
||||
func (q *QueryEnvelope) GetSecondaryAggregations() []SecondaryAggregation {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.SecondaryAggregations
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.SecondaryAggregations
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.SecondaryAggregations
|
||||
case QueryBuilderJoin:
|
||||
return spec.SecondaryAggregations
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
|
||||
// GetLimitBy returns the limit-by configuration. Panics for TraceOperator, Formula, Join, PromQuery and ClickHouseQuery.
|
||||
func (q *QueryEnvelope) GetLimitBy() *LimitBy {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
return spec.LimitBy
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
return spec.LimitBy
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
return spec.LimitBy
|
||||
}
|
||||
panic("unsupported spec type")
|
||||
}
|
||||
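The getters in this file give callers one place to read envelope fields without repeating the type switch at every call site (the Validate and getQueryIdentifier changes further down are exactly that kind of cleanup). A small hedged sketch of how they compose; the helper name is illustrative:

// enabledQueryNames collects the names of all envelopes that are not disabled.
// Only GetDisabled and GetQueryName from this file are used; both panic on
// spec types they do not recognise, so callers pass validated envelopes.
func enabledQueryNames(queries []QueryEnvelope) []string {
	names := make([]string, 0, len(queries))
	for i := range queries {
		if queries[i].GetDisabled() {
			continue
		}
		names = append(names, queries[i].GetQueryName())
	}
	return names
}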
458	pkg/types/querybuildertypes/querybuildertypesv5/req_setters.go	Normal file
@@ -0,0 +1,458 @@
|
||||
package querybuildertypesv5
|
||||
|
||||
import "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
|
||||
// SetExpression sets the expression string of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetExpression(expression string) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.Expression = expression
|
||||
q.Spec = spec
|
||||
case QueryBuilderFormula:
|
||||
spec.Expression = expression
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetReturnSpansFrom sets the return-spans-from value, if applicable.
|
||||
func (q *QueryEnvelope) SetReturnSpansFrom(returnSpansFrom string) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.ReturnSpansFrom = returnSpansFrom
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetSignal sets the signal of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetSignal(signal telemetrytypes.Signal) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Signal = signal
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Signal = signal
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Signal = signal
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetSource sets the source of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetSource(source telemetrytypes.Source) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Source = source
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Source = source
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Source = source
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetQuery sets the raw query string of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetQuery(query string) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case PromQuery:
|
||||
spec.Query = query
|
||||
q.Spec = spec
|
||||
case ClickHouseQuery:
|
||||
spec.Query = query
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetStep sets the PromQL step size, if applicable.
|
||||
func (q *QueryEnvelope) SetStep(step Step) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case PromQuery:
|
||||
spec.Step = step
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetStats sets the PromQL stats flag, if applicable.
|
||||
func (q *QueryEnvelope) SetStats(stats bool) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case PromQuery:
|
||||
spec.Stats = stats
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetLeft sets the left query reference of a join, if applicable.
|
||||
func (q *QueryEnvelope) SetLeft(left QueryRef) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderJoin:
|
||||
spec.Left = left
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetRight sets the right query reference of a join, if applicable.
|
||||
func (q *QueryEnvelope) SetRight(right QueryRef) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderJoin:
|
||||
spec.Right = right
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetJoinType sets the join type, if applicable.
|
||||
func (q *QueryEnvelope) SetJoinType(joinType JoinType) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderJoin:
|
||||
spec.Type = joinType
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetOn sets the join ON condition, if applicable.
|
||||
func (q *QueryEnvelope) SetOn(on string) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderJoin:
|
||||
spec.On = on
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetQueryName sets the name of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetQueryName(name string) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.Name = name
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Name = name
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Name = name
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Name = name
|
||||
q.Spec = spec
|
||||
case QueryBuilderFormula:
|
||||
spec.Name = name
|
||||
q.Spec = spec
|
||||
case QueryBuilderJoin:
|
||||
spec.Name = name
|
||||
q.Spec = spec
|
||||
case PromQuery:
|
||||
spec.Name = name
|
||||
q.Spec = spec
|
||||
case ClickHouseQuery:
|
||||
spec.Name = name
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetDisabled sets the disabled flag of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetDisabled(disabled bool) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.Disabled = disabled
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Disabled = disabled
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Disabled = disabled
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Disabled = disabled
|
||||
q.Spec = spec
|
||||
case QueryBuilderFormula:
|
||||
spec.Disabled = disabled
|
||||
q.Spec = spec
|
||||
case QueryBuilderJoin:
|
||||
spec.Disabled = disabled
|
||||
q.Spec = spec
|
||||
case PromQuery:
|
||||
spec.Disabled = disabled
|
||||
q.Spec = spec
|
||||
case ClickHouseQuery:
|
||||
spec.Disabled = disabled
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetLimit sets the row limit of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetLimit(limit int) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.Limit = limit
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Limit = limit
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Limit = limit
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Limit = limit
|
||||
q.Spec = spec
|
||||
case QueryBuilderFormula:
|
||||
spec.Limit = limit
|
||||
q.Spec = spec
|
||||
case QueryBuilderJoin:
|
||||
spec.Limit = limit
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetOffset sets the row offset of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetOffset(offset int) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.Offset = offset
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Offset = offset
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Offset = offset
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Offset = offset
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetType sets the QueryType of the envelope.
|
||||
func (q *QueryEnvelope) SetType(t QueryType) {
|
||||
q.Type = t
|
||||
}
|
||||
|
||||
// SetOrder sets the order-by clauses of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetOrder(order []OrderBy) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.Order = order
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Order = order
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Order = order
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Order = order
|
||||
q.Spec = spec
|
||||
case QueryBuilderFormula:
|
||||
spec.Order = order
|
||||
q.Spec = spec
|
||||
case QueryBuilderJoin:
|
||||
spec.Order = order
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetGroupBy sets the group-by keys of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetGroupBy(groupBy []GroupByKey) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.GroupBy = groupBy
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.GroupBy = groupBy
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.GroupBy = groupBy
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.GroupBy = groupBy
|
||||
q.Spec = spec
|
||||
case QueryBuilderJoin:
|
||||
spec.GroupBy = groupBy
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetFilter sets the filter of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetFilter(filter *Filter) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.Filter = filter
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Filter = filter
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Filter = filter
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Filter = filter
|
||||
q.Spec = spec
|
||||
case QueryBuilderJoin:
|
||||
spec.Filter = filter
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetHaving sets the having clause of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetHaving(having *Having) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.Having = having
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Having = having
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Having = having
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Having = having
|
||||
q.Spec = spec
|
||||
case QueryBuilderFormula:
|
||||
spec.Having = having
|
||||
q.Spec = spec
|
||||
case QueryBuilderJoin:
|
||||
spec.Having = having
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetFunctions sets the post-processing functions of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetFunctions(functions []Function) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.Functions = functions
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Functions = functions
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Functions = functions
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Functions = functions
|
||||
q.Spec = spec
|
||||
case QueryBuilderFormula:
|
||||
spec.Functions = functions
|
||||
q.Spec = spec
|
||||
case QueryBuilderJoin:
|
||||
spec.Functions = functions
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetSelectFields sets the selected fields of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetSelectFields(fields []telemetrytypes.TelemetryFieldKey) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.SelectFields = fields
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.SelectFields = fields
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.SelectFields = fields
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.SelectFields = fields
|
||||
q.Spec = spec
|
||||
case QueryBuilderJoin:
|
||||
spec.SelectFields = fields
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetLegend sets the legend label of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetLegend(legend string) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.Legend = legend
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Legend = legend
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Legend = legend
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Legend = legend
|
||||
q.Spec = spec
|
||||
case QueryBuilderFormula:
|
||||
spec.Legend = legend
|
||||
q.Spec = spec
|
||||
case PromQuery:
|
||||
spec.Legend = legend
|
||||
q.Spec = spec
|
||||
case ClickHouseQuery:
|
||||
spec.Legend = legend
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetCursor sets the pagination cursor of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetCursor(cursor string) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.Cursor = cursor
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.Cursor = cursor
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.Cursor = cursor
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.Cursor = cursor
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetStepInterval sets the step interval of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetStepInterval(step Step) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderTraceOperator:
|
||||
spec.StepInterval = step
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.StepInterval = step
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.StepInterval = step
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.StepInterval = step
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetSecondaryAggregations sets the secondary aggregations of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetSecondaryAggregations(secondaryAggregations []SecondaryAggregation) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.SecondaryAggregations = secondaryAggregations
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.SecondaryAggregations = secondaryAggregations
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.SecondaryAggregations = secondaryAggregations
|
||||
q.Spec = spec
|
||||
case QueryBuilderJoin:
|
||||
spec.SecondaryAggregations = secondaryAggregations
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
|
||||
// SetLimitBy sets the limit-by configuration of the spec, if applicable.
|
||||
func (q *QueryEnvelope) SetLimitBy(limitBy *LimitBy) {
|
||||
switch spec := q.Spec.(type) {
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
spec.LimitBy = limitBy
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
spec.LimitBy = limitBy
|
||||
q.Spec = spec
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
spec.LimitBy = limitBy
|
||||
q.Spec = spec
|
||||
}
|
||||
}
|
||||
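Note the shape shared by every setter in this file: the type switch binds spec to a copy of the concrete value, the field is set on that copy, and the copy is written back into q.Spec. That reassignment is what makes the mutation visible through the interface, and it is why the methods take a pointer receiver. A short hedged sketch:

// disableAll flips every envelope to disabled in place. Indexing the slice
// (rather than ranging by value) keeps the pointer receiver pointed at the
// stored element, so the updated Spec is retained.
func disableAll(queries []QueryEnvelope) {
	for i := range queries {
		queries[i].SetDisabled(true)
	}
}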
@@ -30,15 +30,3 @@ var (
 func (r RequestType) IsAggregation() bool {
 	return r == RequestTypeTimeSeries || r == RequestTypeScalar || r == RequestTypeDistribution
 }
-
-// Enum implements jsonschema.Enum; returns the acceptable values for RequestType.
-func (RequestType) Enum() []any {
-	return []any{
-		RequestTypeScalar,
-		RequestTypeTimeSeries,
-		RequestTypeRaw,
-		RequestTypeRawStream,
-		RequestTypeTrace,
-		// RequestTypeDistribution,
-	}
-}

@@ -12,7 +12,6 @@ import (
 
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
 	"github.com/SigNoz/signoz/pkg/valuer"
-	"github.com/swaggest/jsonschema-go"
 )
 
 type QBEvent struct {
@@ -43,17 +42,6 @@ type QueryData struct {
 	Results []any `json:"results"`
 }
 
-var _ jsonschema.OneOfExposer = QueryData{}
-
-// JSONSchemaOneOf documents the polymorphic result types in QueryData.Results.
-func (QueryData) JSONSchemaOneOf() []any {
-	return []any{
-		TimeSeriesData{},
-		ScalarData{},
-		RawData{},
-	}
-}
-
 type QueryRangeResponse struct {
 	Type RequestType `json:"type"`
 	Data QueryData `json:"data"`
@@ -64,14 +52,6 @@ type QueryRangeResponse struct {
 	QBEvent *QBEvent `json:"-"`
 }
 
-var _ jsonschema.Preparer = &QueryRangeResponse{}
-
-// PrepareJSONSchema adds description to the QueryRangeResponse schema.
-func (q *QueryRangeResponse) PrepareJSONSchema(schema *jsonschema.Schema) error {
-	schema.WithDescription("Response from the v5 query range endpoint. The data.results array contains typed results depending on the requestType: TimeSeriesData for time_series, ScalarData for scalar, or RawData for raw requests.")
-	return nil
-}
-
 type TimeSeriesData struct {
 	QueryName string `json:"queryName"`
 	Aggregations []*AggregationBucket `json:"aggregations"`
@@ -179,14 +159,6 @@ var (
 	ColumnTypeAggregation = ColumnType{valuer.NewString("aggregation")}
 )
 
-// Enum returns the acceptable values for ColumnType.
-func (ColumnType) Enum() []any {
-	return []any{
-		ColumnTypeGroup,
-		ColumnTypeAggregation,
-	}
-}
-
 type ColumnDescriptor struct {
 	telemetrytypes.TelemetryFieldKey
 	QueryName string `json:"queryName"`

@@ -10,55 +10,9 @@ import (
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
 )
 
-// queryName returns the name from any query envelope spec type.
-func (e QueryEnvelope) queryName() string {
-	switch spec := e.Spec.(type) {
-	case QueryBuilderQuery[TraceAggregation]:
-		return spec.Name
-	case QueryBuilderQuery[LogAggregation]:
-		return spec.Name
-	case QueryBuilderQuery[MetricAggregation]:
-		return spec.Name
-	case QueryBuilderFormula:
-		return spec.Name
-	case QueryBuilderTraceOperator:
-		return spec.Name
-	case QueryBuilderJoin:
-		return spec.Name
-	case PromQuery:
-		return spec.Name
-	case ClickHouseQuery:
-		return spec.Name
-	}
-	return ""
-}
-
-// isDisabled returns the disabled status from any query envelope spec type.
-func (e QueryEnvelope) isDisabled() bool {
-	switch spec := e.Spec.(type) {
-	case QueryBuilderQuery[TraceAggregation]:
-		return spec.Disabled
-	case QueryBuilderQuery[LogAggregation]:
-		return spec.Disabled
-	case QueryBuilderQuery[MetricAggregation]:
-		return spec.Disabled
-	case QueryBuilderFormula:
-		return spec.Disabled
-	case QueryBuilderTraceOperator:
-		return spec.Disabled
-	case QueryBuilderJoin:
-		return spec.Disabled
-	case PromQuery:
-		return spec.Disabled
-	case ClickHouseQuery:
-		return spec.Disabled
-	}
-	return false
-}
-
 // getQueryIdentifier returns a friendly identifier for a query based on its type and name/content
 func getQueryIdentifier(envelope QueryEnvelope, index int) string {
-	name := envelope.queryName()
+	name := envelope.GetQueryName()
 
 	var typeLabel string
 	switch envelope.Type {
@@ -471,7 +425,7 @@ func (r *QueryRangeRequest) Validate() error {
 // validateAllQueriesNotDisabled validates that at least one query in the composite query is enabled
 func (r *QueryRangeRequest) validateAllQueriesNotDisabled() error {
 	for _, envelope := range r.CompositeQuery.Queries {
-		if !envelope.isDisabled() {
+		if !envelope.GetDisabled() {
 			return nil
 		}
 	}
@@ -506,7 +460,7 @@ func (c *CompositeQuery) Validate(requestType RequestType) error {
 
 		// Check name uniqueness for builder queries
 		if envelope.Type == QueryTypeBuilder || envelope.Type == QueryTypeSubQuery {
-			name := envelope.queryName()
+			name := envelope.GetQueryName()
 			if name != "" {
 				if queryNames[name] {
 					return errors.NewInvalidInputf(
@@ -816,7 +816,7 @@ func TestQueryEnvelope_Helpers(t *testing.T) {
 	}
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
-			got := tt.envelope.queryName()
+			got := tt.envelope.GetQueryName()
 			if got != tt.want {
 				t.Errorf("queryName() = %q, want %q", got, tt.want)
 			}
@@ -868,7 +868,7 @@ func TestQueryEnvelope_Helpers(t *testing.T) {
 	}
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
-			got := tt.envelope.isDisabled()
+			got := tt.envelope.GetDisabled()
 			if got != tt.want {
 				t.Errorf("isDisabled() = %v, want %v", got, tt.want)
 			}

35	pkg/types/rawdataexport.go	Normal file
@@ -0,0 +1,35 @@
package types

// ExportRawDataQueryParams represents the query parameters for the export raw data endpoint
type ExportRawDataQueryParams struct {
	ExportRawDataFormatQueryParam

	// Source specifies the type of data to export: "logs" or "traces"
	Source string `query:"source,default=logs" default:"logs" enum:"logs,traces"`

	// Start is the start time for the query (Unix timestamp in nanoseconds)
	Start uint64 `query:"start"`

	// End is the end time for the query (Unix timestamp in nanoseconds)
	End uint64 `query:"end"`

	// Limit specifies the maximum number of rows to export
	Limit int `query:"limit,default=10000" default:"10000" minimum:"1" maximum:"50000"`

	// Filter is a filter expression to apply to the query
	Filter string `query:"filter"`

	// Columns specifies the columns to include in the export
	// Format: ["context.field:type", "context.field", "field"]
	Columns []string `query:"columns"`

	// OrderBy specifies the sorting order
	// Format: "column:direction" or "context.field:type:direction"
	// Direction can be "asc" or "desc"
	OrderBy string `query:"order_by"`
}

type ExportRawDataFormatQueryParam struct {
	// Format specifies the output format: "csv" or "jsonl"
	Format string `query:"format,default=csv" default:"csv" enum:"csv,jsonl"`
}
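The struct tags above define the query-string contract for the export endpoint that the integration tests below exercise. A hedged sketch of a client-side URL builder: the parameter names mirror the tags, the /api/v1/export_raw_data path is taken from those tests, the values are illustrative, and the imports assumed are the standard library net/url and strconv.

// exportRawDataURL builds the request URL for a JSONL export of ERROR logs.
// start and end are Unix nanosecond timestamps, as documented above.
func exportRawDataURL(base string, start, end uint64) string {
	v := url.Values{}
	v.Set("source", "logs")
	v.Set("format", "jsonl")
	v.Set("start", strconv.FormatUint(start, 10))
	v.Set("end", strconv.FormatUint(end, 10))
	v.Set("limit", "1000")
	v.Set("filter", "severity_text = 'ERROR'")
	return base + "/api/v1/export_raw_data?" + v.Encode()
}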
@@ -23,10 +23,8 @@ const (
 	// e.g., "body.status" where "body." is the prefix
 	BodyJSONStringSearchPrefix = "body."
 	ArraySep                   = jsontypeexporter.ArraySeparator
-	ArraySepSuffix             = "[]"
 	// TODO(Piyush): Remove once we've migrated to the new array syntax
-	ArrayAnyIndex       = "[*]."
-	ArrayAnyIndexSuffix = "[*]"
+	ArrayAnyIndex = "[*]."
 )
 
 type TelemetryFieldKey struct {

@@ -172,18 +172,3 @@ func isContextValidForSignal(ctx FieldContext, signal Signal) bool {
 	}
 	return true
 }
-
-// Enum returns the acceptable values for FieldContext.
-func (FieldContext) Enum() []any {
-	return []any{
-		FieldContextMetric,
-		FieldContextLog,
-		FieldContextSpan,
-		// FieldContextTrace,
-		FieldContextResource,
-		// FieldContextScope,
-		FieldContextAttribute,
-		// FieldContextEvent,
-		FieldContextBody,
-	}
-}

@@ -93,14 +93,6 @@ var (
 		FieldDataTypeArrayFloat64: "Array(Float64)",
 		FieldDataTypeArrayBool:    "Array(Bool)",
 	}
-
-	ScalerFieldTypeToArrayFieldType = map[FieldDataType]FieldDataType{
-		FieldDataTypeString:  FieldDataTypeArrayString,
-		FieldDataTypeBool:    FieldDataTypeArrayBool,
-		FieldDataTypeNumber:  FieldDataTypeArrayNumber,
-		FieldDataTypeInt64:   FieldDataTypeArrayInt64,
-		FieldDataTypeFloat64: FieldDataTypeArrayFloat64,
-	}
 )
 
 func (f FieldDataType) CHDataType() string {
@@ -177,19 +169,3 @@ func (f FieldDataType) TagDataType() string {
 		return "string"
 	}
 }
-
-// Enum returns the acceptable values for FieldDataType.
-func (FieldDataType) Enum() []any {
-	return []any{
-		FieldDataTypeString,
-		FieldDataTypeBool,
-		FieldDataTypeFloat64,
-		FieldDataTypeInt64,
-		FieldDataTypeNumber,
-		// FieldDataTypeArrayString,
-		// FieldDataTypeArrayFloat64,
-		// FieldDataTypeArrayBool,
-		// FieldDataTypeArrayInt64,
-		// FieldDataTypeArrayNumber,
-	}
-}

@@ -12,12 +12,3 @@ var (
 	SignalMetrics     = Signal{valuer.NewString("metrics")}
 	SignalUnspecified = Signal{valuer.NewString("")}
 )
-
-// Enum returns the acceptable values for Signal.
-func (Signal) Enum() []any {
-	return []any{
-		SignalTraces,
-		SignalLogs,
-		SignalMetrics,
-	}
-}

@@ -10,10 +10,3 @@ var (
 	SourceMeter       = Source{valuer.NewString("meter")}
 	SourceUnspecified = Source{valuer.NewString("")}
 )
-
-// Enum returns the acceptable values for Source.
-func (Source) Enum() []any {
-	return []any{
-		SourceMeter,
-	}
-}

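All of the Enum methods deleted in the hunks above serve the same purpose: per the removed doc comment on RequestType, they implement jsonschema.Enum from github.com/swaggest/jsonschema-go so that generated schemas list the allowed values for these value types. The sketch below restates that convention; the interface name and signature are given as understood from those comments, not verified against the library.

// Assumed contract from github.com/swaggest/jsonschema-go: a type exposing
// Enum() []any gets those values embedded as an enum in the reflected schema.
// exportFormat is a hypothetical example type, not part of the codebase.
type exportFormat string

func (exportFormat) Enum() []any {
	return []any{"csv", "jsonl"}
}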
@@ -65,7 +65,6 @@ func TestJSONTypeSet() (map[string][]JSONDataType, MetadataStore) {
		"interests[].entities[].reviews[].entries[].metadata[].positions[].unit": {String},
		"interests[].entities[].reviews[].entries[].metadata[].positions[].ratings": {ArrayInt64, ArrayString},
		"message": {String},
		"tags": {ArrayString},
	}

	return types, nil

@@ -1,62 +0,0 @@
from http import HTTPStatus
from typing import Callable

import requests

from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.types import SigNoz

DUPLICATE_USER_EMAIL = "duplicate@integration.test"


def test_duplicate_user_invite_rejected(
    signoz: SigNoz,
    get_token: Callable[[str, str], str],
):
    """
    Verify that the unique index on (email, org_id) in the users table prevents
    creating duplicate users. This invites a new user, accepts the invite, then
    tries to invite and accept the same email again expecting a failure.
    """
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # Step 1: Invite a new user.
    initial_invite_response = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/invite"),
        json={"email": DUPLICATE_USER_EMAIL, "role": "EDITOR"},
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=2,
    )
    assert initial_invite_response.status_code == HTTPStatus.CREATED
    initial_invite_token = initial_invite_response.json()["data"]["token"]

    # Step 2: Accept the invite to create the user.
    initial_accept_response = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/invite/accept"),
        json={"token": initial_invite_token, "password": "password123Z$"},
        timeout=2,
    )
    assert initial_accept_response.status_code == HTTPStatus.CREATED

    # Step 3: Invite the same email again.
    duplicate_invite_response = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/invite"),
        json={"email": DUPLICATE_USER_EMAIL, "role": "VIEWER"},
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=2,
    )

    # The invite creation itself may be rejected if the app checks for existing users.
    if duplicate_invite_response.status_code != HTTPStatus.CREATED:
        assert duplicate_invite_response.status_code == HTTPStatus.CONFLICT
        return

    duplicate_invite_token = duplicate_invite_response.json()["data"]["token"]

    # Step 4: Accept the duplicate invite, which should fail due to the unique constraint.
    duplicate_accept_response = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/invite/accept"),
        json={"token": duplicate_invite_token, "password": "password123Z$"},
        timeout=2,
    )
    assert duplicate_accept_response.status_code == HTTPStatus.CONFLICT
617	tests/integration/src/rawexportdata/01_logs.py	Normal file
@@ -0,0 +1,617 @@
from datetime import datetime, timedelta, timezone
|
||||
from http import HTTPStatus
|
||||
from typing import Callable, List
|
||||
from urllib.parse import urlencode
|
||||
import csv
|
||||
import io
|
||||
import json
|
||||
|
||||
import requests
|
||||
|
||||
from fixtures import types
|
||||
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
|
||||
from fixtures.logs import Logs
|
||||
|
||||
|
||||
def test_export_logs_csv(
|
||||
signoz: types.SigNoz,
|
||||
create_user_admin: None, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
insert_logs: Callable[[List[Logs]], None],
|
||||
) -> None:
|
||||
"""
|
||||
Setup:
|
||||
Insert 3 logs with different severity levels and attributes.
|
||||
|
||||
Tests:
|
||||
1. Export logs as CSV format
|
||||
2. Verify CSV structure and content
|
||||
3. Validate headers are present
|
||||
4. Check log data is correctly formatted
|
||||
"""
|
||||
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||
|
||||
insert_logs(
|
||||
[
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=10),
|
||||
body="Application started successfully",
|
||||
severity_text="INFO",
|
||||
resources={
|
||||
"service.name": "api-service",
|
||||
"deployment.environment": "production",
|
||||
"host.name": "server-01",
|
||||
},
|
||||
attributes={
|
||||
"http.method": "GET",
|
||||
"http.status_code": 200,
|
||||
"user.id": "user123",
|
||||
},
|
||||
),
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=8),
|
||||
body="Connection to database failed",
|
||||
severity_text="ERROR",
|
||||
resources={
|
||||
"service.name": "api-service",
|
||||
"deployment.environment": "production",
|
||||
"host.name": "server-01",
|
||||
},
|
||||
attributes={
|
||||
"error.type": "ConnectionError",
|
||||
"db.name": "production_db",
|
||||
},
|
||||
),
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=5),
|
||||
body="Request processed",
|
||||
severity_text="DEBUG",
|
||||
resources={
|
||||
"service.name": "worker-service",
|
||||
"deployment.environment": "production",
|
||||
"host.name": "server-02",
|
||||
},
|
||||
attributes={
|
||||
"request.id": "req-456",
|
||||
"duration_ms": 150.5,
|
||||
},
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Calculate timestamps in nanoseconds
|
||||
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
|
||||
end_ns = int(now.timestamp() * 1e9)
|
||||
|
||||
params = {
|
||||
"start": start_ns,
|
||||
"end": end_ns,
|
||||
}
|
||||
|
||||
# Export logs as CSV (default format, no source needed)
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v1/export_raw_data?{urlencode(params)}"),
|
||||
headers={
|
||||
"authorization": f"Bearer {token}",
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert response.headers["Content-Type"] == "text/csv"
|
||||
assert "attachment" in response.headers.get("Content-Disposition", "")
|
||||
|
||||
# Parse CSV content
|
||||
csv_content = response.text
|
||||
csv_reader = csv.DictReader(io.StringIO(csv_content))
|
||||
|
||||
rows = list(csv_reader)
|
||||
assert len(rows) == 3, f"Expected 3 rows, got {len(rows)}"
|
||||
|
||||
# Verify log bodies are present in the exported data
|
||||
bodies = [row.get("body") for row in rows]
|
||||
assert "Application started successfully" in bodies
|
||||
assert "Connection to database failed" in bodies
|
||||
assert "Request processed" in bodies
|
||||
|
||||
# Verify severity levels
|
||||
severities = [row.get("severity_text") for row in rows]
|
||||
assert "INFO" in severities
|
||||
assert "ERROR" in severities
|
||||
assert "DEBUG" in severities
|
||||
|
||||
|
||||
def test_export_logs_jsonl(
|
||||
signoz: types.SigNoz,
|
||||
create_user_admin: None, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
insert_logs: Callable[[List[Logs]], None],
|
||||
) -> None:
|
||||
"""
|
||||
Setup:
|
||||
Insert 2 logs with different attributes.
|
||||
|
||||
Tests:
|
||||
1. Export logs as JSONL format
|
||||
2. Verify JSONL structure and content
|
||||
3. Check each line is valid JSON
|
||||
4. Validate log data is correctly formatted
|
||||
"""
|
||||
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||
|
||||
insert_logs(
|
||||
[
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=10),
|
||||
body="User logged in",
|
||||
severity_text="INFO",
|
||||
resources={
|
||||
"service.name": "auth-service",
|
||||
"deployment.environment": "staging",
|
||||
},
|
||||
attributes={
|
||||
"user.email": "test@example.com",
|
||||
"session.id": "sess-789",
|
||||
},
|
||||
),
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=5),
|
||||
body="Payment processed successfully",
|
||||
severity_text="INFO",
|
||||
resources={
|
||||
"service.name": "payment-service",
|
||||
"deployment.environment": "staging",
|
||||
},
|
||||
attributes={
|
||||
"transaction.id": "txn-123",
|
||||
"amount": 99.99,
|
||||
},
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Calculate timestamps in nanoseconds
|
||||
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
|
||||
end_ns = int(now.timestamp() * 1e9)
|
||||
|
||||
params = {
|
||||
"start": start_ns,
|
||||
"end": end_ns,
|
||||
"format": "jsonl",
|
||||
"source": "logs",
|
||||
}
|
||||
|
||||
# Export logs as JSONL
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v1/export_raw_data?{urlencode(params)}"),
|
||||
timeout=10,
|
||||
headers={
|
||||
"authorization": f"Bearer {token}",
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert response.headers["Content-Type"] == "application/x-ndjson"
|
||||
assert "attachment" in response.headers.get("Content-Disposition", "")
|
||||
|
||||
# Parse JSONL content
|
||||
jsonl_lines = response.text.strip().split("\n")
|
||||
assert len(jsonl_lines) == 2, f"Expected 2 lines, got {len(jsonl_lines)}"
|
||||
|
||||
# Verify each line is valid JSON
|
||||
json_objects = []
|
||||
for line in jsonl_lines:
|
||||
obj = json.loads(line)
|
||||
json_objects.append(obj)
|
||||
assert "id" in obj
|
||||
assert "timestamp" in obj
|
||||
assert "body" in obj
|
||||
assert "severity_text" in obj
|
||||
|
||||
# Verify log bodies
|
||||
bodies = [obj.get("body") for obj in json_objects]
|
||||
assert "User logged in" in bodies
|
||||
assert "Payment processed successfully" in bodies
|
||||
|
||||
|
||||
def test_export_logs_with_filter(
|
||||
signoz: types.SigNoz,
|
||||
create_user_admin: None, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
insert_logs: Callable[[List[Logs]], None],
|
||||
) -> None:
|
||||
"""
|
||||
Setup:
|
||||
Insert logs with different severity levels.
|
||||
|
||||
Tests:
|
||||
1. Export logs with filter applied
|
||||
2. Verify only filtered logs are returned
|
||||
"""
|
||||
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||
|
||||
insert_logs(
|
||||
[
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=10),
|
||||
body="Info message",
|
||||
severity_text="INFO",
|
||||
resources={
|
||||
"service.name": "test-service",
|
||||
},
|
||||
attributes={},
|
||||
),
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=8),
|
||||
body="Error message",
|
||||
severity_text="ERROR",
|
||||
resources={
|
||||
"service.name": "test-service",
|
||||
},
|
||||
attributes={},
|
||||
),
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=5),
|
||||
body="Another error message",
|
||||
severity_text="ERROR",
|
||||
resources={
|
||||
"service.name": "test-service",
|
||||
},
|
||||
attributes={},
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Calculate timestamps in nanoseconds
|
||||
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
|
||||
end_ns = int(now.timestamp() * 1e9)
|
||||
|
||||
params = {
|
||||
"start": start_ns,
|
||||
"end": end_ns,
|
||||
"format": "jsonl",
|
||||
"source": "logs",
|
||||
"filter": "severity_text = 'ERROR'",
|
||||
}
|
||||
|
||||
# Export logs with filter
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v1/export_raw_data?{urlencode(params)}"),
|
||||
timeout=10,
|
||||
headers={
|
||||
"authorization": f"Bearer {token}",
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert response.headers["Content-Type"] == "application/x-ndjson"
|
||||
|
||||
# Parse JSONL content
|
||||
jsonl_lines = response.text.strip().split("\n")
|
||||
assert len(jsonl_lines) == 2, f"Expected 2 lines (filtered), got {len(jsonl_lines)}"
|
||||
|
||||
# Verify only ERROR logs are returned
|
||||
for line in jsonl_lines:
|
||||
obj = json.loads(line)
|
||||
assert obj["severity_text"] == "ERROR"
|
||||
assert "error message" in obj["body"].lower()
|
||||
|
||||
|
||||
def test_export_logs_with_limit(
|
||||
signoz: types.SigNoz,
|
||||
create_user_admin: None, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
insert_logs: Callable[[List[Logs]], None],
|
||||
) -> None:
|
||||
"""
|
||||
Setup:
|
||||
Insert 5 logs.
|
||||
|
||||
Tests:
|
||||
1. Export logs with limit applied
|
||||
2. Verify only limited number of logs are returned
|
||||
"""
|
||||
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||
|
||||
logs = []
|
||||
for i in range(5):
|
||||
logs.append(
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=i),
|
||||
body=f"Log message {i}",
|
||||
severity_text="INFO",
|
||||
resources={
|
||||
"service.name": "test-service",
|
||||
},
|
||||
attributes={
|
||||
"index": i,
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
insert_logs(logs)
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Calculate timestamps in nanoseconds
|
||||
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
|
||||
end_ns = int(now.timestamp() * 1e9)
|
||||
|
||||
params = {
|
||||
"start": start_ns,
|
||||
"end": end_ns,
|
||||
"format": "csv",
|
||||
"source": "logs",
|
||||
"limit": 3,
|
||||
}
|
||||
|
||||
# Export logs with limit
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v1/export_raw_data?{urlencode(params)}"),
|
||||
timeout=10,
|
||||
headers={
|
||||
"authorization": f"Bearer {token}",
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert response.headers["Content-Type"] == "text/csv"
|
||||
|
||||
# Parse CSV content
|
||||
csv_content = response.text
|
||||
csv_reader = csv.DictReader(io.StringIO(csv_content))
|
||||
|
||||
rows = list(csv_reader)
|
||||
assert len(rows) == 3, f"Expected 3 rows (limited), got {len(rows)}"
|
||||
|
||||
|
||||
def test_export_logs_with_columns(
|
||||
signoz: types.SigNoz,
|
||||
create_user_admin: None, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
insert_logs: Callable[[List[Logs]], None],
|
||||
) -> None:
|
||||
"""
|
||||
Setup:
|
||||
Insert logs with various attributes.
|
||||
|
||||
Tests:
|
||||
1. Export logs with specific columns
|
||||
2. Verify only specified columns are returned
|
||||
"""
|
||||
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||
|
||||
insert_logs(
|
||||
[
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=10),
|
||||
body="Test log message",
|
||||
severity_text="INFO",
|
||||
resources={
|
||||
"service.name": "test-service",
|
||||
"deployment.environment": "production",
|
||||
},
|
||||
attributes={
|
||||
"http.method": "GET",
|
||||
"http.status_code": 200,
|
||||
},
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Calculate timestamps in nanoseconds
|
||||
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
|
||||
end_ns = int(now.timestamp() * 1e9)
|
||||
|
||||
# Request only specific columns
|
||||
params = {
|
||||
"start": start_ns,
|
||||
"end": end_ns,
|
||||
"format": "csv",
|
||||
"source": "logs",
|
||||
"columns": ["timestamp", "severity_text", "body"],
|
||||
}
|
||||
|
||||
# Export logs with specific columns
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v1/export_raw_data?{urlencode(params, doseq=True)}"),
|
||||
timeout=10,
|
||||
headers={
|
||||
"authorization": f"Bearer {token}",
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert response.headers["Content-Type"] == "text/csv"
|
||||
|
||||
# Parse CSV content
|
||||
csv_content = response.text
|
||||
csv_reader = csv.DictReader(io.StringIO(csv_content))
|
||||
|
||||
rows = list(csv_reader)
|
||||
assert len(rows) == 1
|
||||
|
||||
# Verify the specified columns are present
|
||||
row = rows[0]
|
||||
assert "timestamp" in row
|
||||
assert "severity_text" in row
|
||||
assert "body" in row
|
||||
assert row["severity_text"] == "INFO"
|
||||
assert row["body"] == "Test log message"
|
||||
|
||||
|
||||
def test_export_logs_with_order_by(
|
||||
signoz: types.SigNoz,
|
||||
create_user_admin: None, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
insert_logs: Callable[[List[Logs]], None],
|
||||
) -> None:
|
||||
"""
|
||||
Setup:
|
||||
Insert logs at different timestamps.
|
||||
|
||||
Tests:
|
||||
1. Export logs with ascending timestamp order
|
||||
2. Verify logs are returned in correct order
|
||||
"""
|
||||
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||
|
||||
insert_logs(
|
||||
[
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=10),
|
||||
body="First log",
|
||||
severity_text="INFO",
|
||||
resources={
|
||||
"service.name": "test-service",
|
||||
},
|
||||
attributes={},
|
||||
),
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=5),
|
||||
body="Second log",
|
||||
severity_text="INFO",
|
||||
resources={
|
||||
"service.name": "test-service",
|
||||
},
|
||||
attributes={},
|
||||
),
|
||||
Logs(
|
||||
timestamp=now - timedelta(seconds=1),
|
||||
body="Third log",
|
||||
severity_text="INFO",
|
||||
resources={
|
||||
"service.name": "test-service",
|
||||
},
|
||||
attributes={},
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Calculate timestamps in nanoseconds
|
||||
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
|
||||
end_ns = int(now.timestamp() * 1e9)
|
||||
|
||||
params = {
|
||||
"start": start_ns,
|
||||
"end": end_ns,
|
||||
"format": "jsonl",
|
||||
"source": "logs",
|
||||
"order_by": "timestamp:asc",
|
||||
}
|
||||
|
||||
# Export logs with ascending order
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v1/export_raw_data?{urlencode(params)}"),
|
||||
timeout=10,
|
||||
headers={
|
||||
"authorization": f"Bearer {token}",
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert response.headers["Content-Type"] == "application/x-ndjson"
|
||||
|
||||
# Parse JSONL content
|
||||
jsonl_lines = response.text.strip().split("\n")
|
||||
assert len(jsonl_lines) == 3
|
||||
|
||||
# Verify order - first log should be "First log" (oldest)
|
||||
json_objects = [json.loads(line) for line in jsonl_lines]
|
||||
assert json_objects[0]["body"] == "First log"
|
||||
assert json_objects[1]["body"] == "Second log"
|
||||
assert json_objects[2]["body"] == "Third log"
|
||||
|
||||
|
||||
def test_export_logs_with_complex_filter(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    """
    Setup:
        Insert logs with various service names and severity levels.

    Tests:
        1. Export logs with complex filter (multiple conditions)
        2. Verify only logs matching all conditions are returned
    """
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)

    insert_logs(
        [
            Logs(
                timestamp=now - timedelta(seconds=10),
                body="API error occurred",
                severity_text="ERROR",
                resources={
                    "service.name": "api-service",
                },
                attributes={},
            ),
            Logs(
                timestamp=now - timedelta(seconds=8),
                body="Worker info message",
                severity_text="INFO",
                resources={
                    "service.name": "worker-service",
                },
                attributes={},
            ),
            Logs(
                timestamp=now - timedelta(seconds=5),
                body="API info message",
                severity_text="INFO",
                resources={
                    "service.name": "api-service",
                },
                attributes={},
            ),
        ]
    )

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # Calculate timestamps in nanoseconds
    start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
    end_ns = int(now.timestamp() * 1e9)

    # Filter for api-service AND ERROR severity
    params = {
        "start": start_ns,
        "end": end_ns,
        "format": "jsonl",
        "source": "logs",
        "filter": "service.name = 'api-service' AND severity_text = 'ERROR'",
    }

    # Export logs with complex filter
    response = requests.get(
        signoz.self.host_configs["8080"].get(f"/api/v1/export_raw_data?{urlencode(params)}"),
        timeout=10,
        headers={
            "authorization": f"Bearer {token}",
        },
    )

    assert response.status_code == HTTPStatus.OK
    assert response.headers["Content-Type"] == "application/x-ndjson"

    # Parse JSONL content
    jsonl_lines = response.text.strip().split("\n")
    assert len(jsonl_lines) == 1, f"Expected 1 line (complex filter), got {len(jsonl_lines)}"

    # Verify the filtered log
    filtered_obj = json.loads(jsonl_lines[0])
    assert filtered_obj["body"] == "API error occurred"
    assert filtered_obj["severity_text"] == "ERROR"
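For quick reference, the GET variant of the export endpoint exercised by these log tests takes start, end, source, format, filter, order_by, and limit as query parameters. Below is a minimal standalone sketch of such a call; the base URL, token, and timestamps are placeholders and not part of this diff, and the parameter names simply mirror the requests above.

# Hypothetical standalone sketch of the GET export call used in these tests.
# BASE_URL and TOKEN are placeholders; parameter names mirror the tests above.
from urllib.parse import urlencode

import requests

BASE_URL = "http://localhost:8080"  # placeholder
TOKEN = "<access-token>"            # placeholder

params = {
    "start": 1_700_000_000_000_000_000,  # nanoseconds
    "end": 1_700_000_300_000_000_000,    # nanoseconds
    "source": "logs",
    "format": "jsonl",
    "filter": "service.name = 'api-service' AND severity_text = 'ERROR'",
    "order_by": "timestamp:asc",
    "limit": 1000,
}

response = requests.get(
    f"{BASE_URL}/api/v1/export_raw_data?{urlencode(params)}",
    headers={"authorization": f"Bearer {TOKEN}"},
    timeout=10,
)
for line in response.text.strip().split("\n"):
    print(line)  # with format=jsonl, each line is one JSON document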
tests/integration/src/rawexportdata/02_traces.py (new file, 773 lines)
@@ -0,0 +1,773 @@
from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable, List
from urllib.parse import urlencode
import csv
import io
import json

import requests

from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.traces import TraceIdGenerator, Traces, TracesKind, TracesStatusCode


def test_export_traces_csv(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
    """
    Setup:
        Insert 3 traces with different attributes.

    Tests:
        1. Export traces as CSV format
        2. Verify CSV structure and content
        3. Validate headers are present
        4. Check trace data is correctly formatted
    """
    http_service_trace_id = TraceIdGenerator.trace_id()
    http_service_span_id = TraceIdGenerator.span_id()
    http_service_db_span_id = TraceIdGenerator.span_id()
    topic_service_trace_id = TraceIdGenerator.trace_id()
    topic_service_span_id = TraceIdGenerator.span_id()

    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)

    insert_traces(
        [
            Traces(
                timestamp=now - timedelta(seconds=4),
                duration=timedelta(seconds=3),
                trace_id=http_service_trace_id,
                span_id=http_service_span_id,
                parent_span_id="",
                name="POST /integration",
                kind=TracesKind.SPAN_KIND_SERVER,
                status_code=TracesStatusCode.STATUS_CODE_OK,
                status_message="",
                resources={
                    "deployment.environment": "production",
                    "service.name": "http-service",
                    "os.type": "linux",
                    "host.name": "linux-000",
                },
                attributes={
                    "net.transport": "IP.TCP",
                    "http.scheme": "http",
                    "http.user_agent": "Integration Test",
                    "http.request.method": "POST",
                    "http.response.status_code": "200",
                },
            ),
            Traces(
                timestamp=now - timedelta(seconds=3.5),
                duration=timedelta(seconds=0.5),
                trace_id=http_service_trace_id,
                span_id=http_service_db_span_id,
                parent_span_id=http_service_span_id,
                name="SELECT",
                kind=TracesKind.SPAN_KIND_CLIENT,
                status_code=TracesStatusCode.STATUS_CODE_OK,
                status_message="",
                resources={
                    "deployment.environment": "production",
                    "service.name": "http-service",
                    "os.type": "linux",
                    "host.name": "linux-000",
                },
                attributes={
                    "db.name": "integration",
                    "db.operation": "SELECT",
                    "db.statement": "SELECT * FROM integration",
                },
            ),
            Traces(
                timestamp=now - timedelta(seconds=1),
                duration=timedelta(seconds=2),
                trace_id=topic_service_trace_id,
                span_id=topic_service_span_id,
                parent_span_id="",
                name="topic publish",
                kind=TracesKind.SPAN_KIND_PRODUCER,
                status_code=TracesStatusCode.STATUS_CODE_OK,
                status_message="",
                resources={
                    "deployment.environment": "production",
                    "service.name": "topic-service",
                    "os.type": "linux",
                    "host.name": "linux-001",
                },
                attributes={
                    "message.type": "SENT",
                    "messaging.operation": "publish",
                    "messaging.message.id": "001",
                },
            ),
        ]
    )

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # Calculate timestamps in nanoseconds
    start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
    end_ns = int(now.timestamp() * 1e9)

    params = {
        "start": start_ns,
        "end": end_ns,
        "source": "traces",
        "limit": 1000,
    }

    # Export traces as CSV (GET for simple queries)
    response = requests.get(
        signoz.self.host_configs["8080"].get(f"/api/v1/export_raw_data?{urlencode(params)}"),
        headers={
            "authorization": f"Bearer {token}",
        },
    )

    assert response.status_code == HTTPStatus.OK
    assert response.headers["Content-Type"] == "text/csv"
    assert "attachment" in response.headers.get("Content-Disposition", "")

    # Parse CSV content
    csv_content = response.text
    csv_reader = csv.DictReader(io.StringIO(csv_content))

    rows = list(csv_reader)
    assert len(rows) == 3, f"Expected 3 rows, got {len(rows)}"

    # Verify trace IDs are present in the exported data
    trace_ids = [row.get("trace_id") for row in rows]
    assert http_service_trace_id in trace_ids
    assert topic_service_trace_id in trace_ids

    # Verify span names are present
    span_names = [row.get("name") for row in rows]
    assert "POST /integration" in span_names
    assert "SELECT" in span_names
    assert "topic publish" in span_names


def test_export_traces_jsonl(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
    """
    Setup:
        Insert 2 traces with different attributes.

    Tests:
        1. Export traces as JSONL format
        2. Verify JSONL structure and content
        3. Check each line is valid JSON
        4. Validate trace data is correctly formatted
    """
    http_service_trace_id = TraceIdGenerator.trace_id()
    http_service_span_id = TraceIdGenerator.span_id()
    topic_service_trace_id = TraceIdGenerator.trace_id()
    topic_service_span_id = TraceIdGenerator.span_id()

    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)

    insert_traces(
        [
            Traces(
                timestamp=now - timedelta(seconds=4),
                duration=timedelta(seconds=3),
                trace_id=http_service_trace_id,
                span_id=http_service_span_id,
                parent_span_id="",
                name="POST /api/test",
                kind=TracesKind.SPAN_KIND_SERVER,
                status_code=TracesStatusCode.STATUS_CODE_OK,
                status_message="",
                resources={
                    "service.name": "api-service",
                    "deployment.environment": "staging",
                },
                attributes={
                    "http.request.method": "POST",
                    "http.response.status_code": "201",
                },
            ),
            Traces(
                timestamp=now - timedelta(seconds=2),
                duration=timedelta(seconds=1),
                trace_id=topic_service_trace_id,
                span_id=topic_service_span_id,
                parent_span_id="",
                name="queue.process",
                kind=TracesKind.SPAN_KIND_CONSUMER,
                status_code=TracesStatusCode.STATUS_CODE_OK,
                status_message="",
                resources={
                    "service.name": "queue-service",
                    "deployment.environment": "staging",
                },
                attributes={
                    "messaging.operation": "process",
                    "messaging.system": "rabbitmq",
                },
            ),
        ]
    )

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # Calculate timestamps in nanoseconds
    start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
    end_ns = int(now.timestamp() * 1e9)

    params = {
        "start": start_ns,
        "end": end_ns,
        "format": "jsonl",
        "source": "traces",
        "limit": 1000,
    }

    # Export traces as JSONL (GET for simple queries)
    response = requests.get(
        signoz.self.host_configs["8080"].get(f"/api/v1/export_raw_data?{urlencode(params)}"),
        timeout=10,
        headers={
            "authorization": f"Bearer {token}",
        },
    )

    assert response.status_code == HTTPStatus.OK
    assert response.headers["Content-Type"] == "application/x-ndjson"
    assert "attachment" in response.headers.get("Content-Disposition", "")

    # Parse JSONL content
    jsonl_lines = response.text.strip().split("\n")
    assert len(jsonl_lines) == 2, f"Expected 2 lines, got {len(jsonl_lines)}"

    # Verify each line is valid JSON
    json_objects = []
    for line in jsonl_lines:
        obj = json.loads(line)
        json_objects.append(obj)
        assert "trace_id" in obj
        assert "span_id" in obj
        assert "name" in obj

    # Verify trace IDs are present
    trace_ids = [obj.get("trace_id") for obj in json_objects]
    assert http_service_trace_id in trace_ids
    assert topic_service_trace_id in trace_ids

    # Verify span names are present
    span_names = [obj.get("name") for obj in json_objects]
    assert "POST /api/test" in span_names
    assert "queue.process" in span_names


def test_export_traces_with_filter(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
    """
    Setup:
        Insert traces with different service names.

    Tests:
        1. Export traces with filter applied
        2. Verify only filtered traces are returned
    """
    service_a_trace_id = TraceIdGenerator.trace_id()
    service_a_span_id = TraceIdGenerator.span_id()
    service_b_trace_id = TraceIdGenerator.trace_id()
    service_b_span_id = TraceIdGenerator.span_id()

    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)

    insert_traces(
        [
            Traces(
                timestamp=now - timedelta(seconds=4),
                duration=timedelta(seconds=1),
                trace_id=service_a_trace_id,
                span_id=service_a_span_id,
                parent_span_id="",
                name="operation-a",
                kind=TracesKind.SPAN_KIND_SERVER,
                status_code=TracesStatusCode.STATUS_CODE_OK,
                status_message="",
                resources={
                    "service.name": "service-a",
                },
                attributes={},
            ),
            Traces(
                timestamp=now - timedelta(seconds=2),
                duration=timedelta(seconds=1),
                trace_id=service_b_trace_id,
                span_id=service_b_span_id,
                parent_span_id="",
                name="operation-b",
                kind=TracesKind.SPAN_KIND_SERVER,
                status_code=TracesStatusCode.STATUS_CODE_OK,
                status_message="",
                resources={
                    "service.name": "service-b",
                },
                attributes={},
            ),
        ]
    )

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # Calculate timestamps in nanoseconds
    start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
    end_ns = int(now.timestamp() * 1e9)

    params = {
        "start": start_ns,
        "end": end_ns,
        "format": "jsonl",
        "source": "traces",
        "limit": 1000,
        "filter": "service.name = 'service-a'",
    }

    # Export traces with filter (GET supports filter param)
    response = requests.get(
        signoz.self.host_configs["8080"].get(f"/api/v1/export_raw_data?{urlencode(params)}"),
        timeout=10,
        headers={
            "authorization": f"Bearer {token}",
        },
    )

    assert response.status_code == HTTPStatus.OK
    assert response.headers["Content-Type"] == "application/x-ndjson"

    # Parse JSONL content
    jsonl_lines = response.text.strip().split("\n")
    assert len(jsonl_lines) == 1, f"Expected 1 line (filtered), got {len(jsonl_lines)}"

    # Verify the filtered trace
    filtered_obj = json.loads(jsonl_lines[0])
    assert filtered_obj["trace_id"] == service_a_trace_id
    assert filtered_obj["name"] == "operation-a"


def test_export_traces_with_limit(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
    """
    Setup:
        Insert 5 traces.

    Tests:
        1. Export traces with limit applied
        2. Verify only limited number of traces are returned
    """
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)

    traces = []
    for i in range(5):
        traces.append(
            Traces(
                timestamp=now - timedelta(seconds=i),
                duration=timedelta(seconds=1),
                trace_id=TraceIdGenerator.trace_id(),
                span_id=TraceIdGenerator.span_id(),
                parent_span_id="",
                name=f"operation-{i}",
                kind=TracesKind.SPAN_KIND_SERVER,
                status_code=TracesStatusCode.STATUS_CODE_OK,
                status_message="",
                resources={
                    "service.name": "test-service",
                },
                attributes={},
            )
        )

    insert_traces(traces)

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # Calculate timestamps in nanoseconds
    start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
    end_ns = int(now.timestamp() * 1e9)

    params = {
        "start": start_ns,
        "end": end_ns,
        "format": "csv",
        "source": "traces",
        "limit": 3,
    }

    # Export traces with limit (GET supports limit param)
    response = requests.get(
        signoz.self.host_configs["8080"].get(f"/api/v1/export_raw_data?{urlencode(params)}"),
        timeout=10,
        headers={
            "authorization": f"Bearer {token}",
        },
    )

    assert response.status_code == HTTPStatus.OK
    assert response.headers["Content-Type"] == "text/csv"

    # Parse CSV content
    csv_content = response.text
    csv_reader = csv.DictReader(io.StringIO(csv_content))

    rows = list(csv_reader)
    assert len(rows) == 3, f"Expected 3 rows (limited), got {len(rows)}"


def test_export_traces_multiple_queries_rejected(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
) -> None:
    """
    Tests:
        1. POST with multiple builder queries but no trace operator is rejected
        2. Verify 400 error is returned
    """
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
    end_ns = int(now.timestamp() * 1e9)

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    body = {
        "start": start_ns,
        "end": end_ns,
        "compositeQuery": {
            "queries": [
                {
                    "type": "builder_query",
                    "spec": {
                        "signal": "traces",
                        "name": "A",
                        "limit": 1000,
                        "filter": {"expression": "service.name = 'service-a'"},
                    },
                },
                {
                    "type": "builder_query",
                    "spec": {
                        "signal": "traces",
                        "name": "B",
                        "limit": 1000,
                        "filter": {"expression": "service.name = 'service-b'"},
                    },
                },
            ]
        },
    }

    url = signoz.self.host_configs["8080"].get("/api/v1/export_raw_data?format=jsonl")
    response = requests.post(
        url,
        json=body,
        timeout=10,
        headers={
            "authorization": f"Bearer {token}",
            "Content-Type": "application/json",
        },
    )

    assert response.status_code == HTTPStatus.BAD_REQUEST


def test_export_traces_with_composite_query_trace_operator(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
    """
    Setup:
        Insert multiple traces with parent-child relationships.

    Tests:
        1. Export traces using trace operator in composite query (POST)
        2. Verify trace operator query works correctly
    """
    parent_trace_id = TraceIdGenerator.trace_id()
    parent_span_id = TraceIdGenerator.span_id()
    child_span_id_1 = TraceIdGenerator.span_id()
    child_span_id_2 = TraceIdGenerator.span_id()

    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)

    insert_traces(
        [
            Traces(
                timestamp=now - timedelta(seconds=10),
                duration=timedelta(seconds=5),
                trace_id=parent_trace_id,
                span_id=parent_span_id,
                parent_span_id="",
                name="parent-operation",
                kind=TracesKind.SPAN_KIND_SERVER,
                status_code=TracesStatusCode.STATUS_CODE_OK,
                status_message="",
                resources={
                    "service.name": "parent-service",
                },
                attributes={
                    "operation.type": "parent",
                },
            ),
            Traces(
                timestamp=now - timedelta(seconds=9),
                duration=timedelta(seconds=2),
                trace_id=parent_trace_id,
                span_id=child_span_id_1,
                parent_span_id=parent_span_id,
                name="child-operation-1",
                kind=TracesKind.SPAN_KIND_INTERNAL,
                status_code=TracesStatusCode.STATUS_CODE_OK,
                status_message="",
                resources={
                    "service.name": "parent-service",
                },
                attributes={
                    "operation.type": "child",
                },
            ),
            Traces(
                timestamp=now - timedelta(seconds=7),
                duration=timedelta(seconds=1),
                trace_id=parent_trace_id,
                span_id=child_span_id_2,
                parent_span_id=parent_span_id,
                name="child-operation-2",
                kind=TracesKind.SPAN_KIND_INTERNAL,
                status_code=TracesStatusCode.STATUS_CODE_OK,
                status_message="",
                resources={
                    "service.name": "parent-service",
                },
                attributes={
                    "operation.type": "child",
                },
            ),
        ]
    )

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # Calculate timestamps in nanoseconds
    start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
    end_ns = int(now.timestamp() * 1e9)

    # A: spans with operation.type = 'parent'
    query_a = {
        "type": "builder_query",
        "spec": {
            "signal": "traces",
            "name": "A",
            "limit": 1000,
            "filter": {"expression": "operation.type = 'parent'"},
        },
    }

    # B: spans with operation.type = 'child'
    query_b = {
        "type": "builder_query",
        "spec": {
            "signal": "traces",
            "name": "B",
            "limit": 1000,
            "filter": {"expression": "operation.type = 'child'"},
        },
    }

    # Trace operator: find traces where A has a direct descendant B
    query_c = {
        "type": "builder_trace_operator",
        "spec": {
            "name": "C",
            "expression": "A => B",
            "returnSpansFrom": "A",
            "limit": 1000,
            "order": [{"key": {"name": "timestamp"}, "direction": "desc"}],
        },
    }

    body = {
        "start": start_ns,
        "end": end_ns,
        "requestType": "raw",
        "compositeQuery": {
            "queries": [query_a, query_b, query_c],
        },
    }

    url = signoz.self.host_configs["8080"].get("/api/v1/export_raw_data?format=jsonl")
    response = requests.post(
        url,
        json=body,
        timeout=10,
        headers={
            "authorization": f"Bearer {token}",
            "Content-Type": "application/json",
        },
    )

    print(response.text)
    assert response.status_code == HTTPStatus.OK
    assert response.headers["Content-Type"] == "application/x-ndjson"

    # Parse JSONL content
    jsonl_lines = response.text.strip().split("\n")
    assert len(jsonl_lines) == 1, f"Expected 1 line, got {len(jsonl_lines)}"

    # Verify all returned spans belong to the matched trace
    json_objects = [json.loads(line) for line in jsonl_lines]
    trace_ids = [obj.get("trace_id") for obj in json_objects]
    assert all(tid == parent_trace_id for tid in trace_ids)

    # Verify the parent span (returnSpansFrom = "A") is present
    span_names = [obj.get("name") for obj in json_objects]
    assert "parent-operation" in span_names


def test_export_traces_with_select_fields(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
    """
    Setup:
        Insert traces with various attributes.

    Tests:
        1. Export traces with specific select fields via POST
        2. Verify only specified fields are returned in the output
    """
    trace_id = TraceIdGenerator.trace_id()
    span_id = TraceIdGenerator.span_id()

    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)

    insert_traces(
        [
            Traces(
                timestamp=now - timedelta(seconds=10),
                duration=timedelta(seconds=2),
                trace_id=trace_id,
                span_id=span_id,
                parent_span_id="",
                name="test-operation",
                kind=TracesKind.SPAN_KIND_SERVER,
                status_code=TracesStatusCode.STATUS_CODE_OK,
                status_message="",
                resources={
                    "service.name": "test-service",
                    "deployment.environment": "production",
                    "host.name": "server-01",
                },
                attributes={
                    "http.method": "POST",
                    "http.status_code": "201",
                    "user.id": "user123",
                },
            ),
        ]
    )

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # Calculate timestamps in nanoseconds
    start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
    end_ns = int(now.timestamp() * 1e9)

    body = {
        "start": start_ns,
        "end": end_ns,
        "requestType": "raw",
        "compositeQuery": {
            "queries": [
                {
                    "type": "builder_query",
                    "spec": {
                        "signal": "traces",
                        "name": "A",
                        "limit": 1000,
                        "selectFields": [
                            {
                                "name": "trace_id",
                                "fieldDataType": "string",
                                "fieldContext": "span",
                                "signal": "traces",
                            },
                            {
                                "name": "span_id",
                                "fieldDataType": "string",
                                "fieldContext": "span",
                                "signal": "traces",
                            },
                            {
                                "name": "name",
                                "fieldDataType": "string",
                                "fieldContext": "span",
                                "signal": "traces",
                            },
                            {
                                "name": "service.name",
                                "fieldDataType": "string",
                                "fieldContext": "resource",
                                "signal": "traces",
                            },
                        ],
                    },
                }
            ]
        },
    }

    url = signoz.self.host_configs["8080"].get("/api/v1/export_raw_data?format=jsonl")
    response = requests.post(
        url,
        json=body,
        timeout=10,
        headers={
            "authorization": f"Bearer {token}",
            "Content-Type": "application/json",
        },
    )

    assert response.status_code == HTTPStatus.OK
    assert response.headers["Content-Type"] == "application/x-ndjson"

    # Parse JSONL content
    jsonl_lines = response.text.strip().split("\n")
    assert len(jsonl_lines) == 1

    # Verify the selected fields are present
    result = json.loads(jsonl_lines[0])
    assert "trace_id" in result
    assert "span_id" in result
    assert "name" in result

    # Verify values
    assert result["trace_id"] == trace_id
    assert result["span_id"] == span_id
    assert result["name"] == "test-operation"
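For composite queries (multiple builder queries combined by a trace operator, or explicit selectFields) the tests above use the POST variant of the endpoint. Below is a minimal standalone sketch with the same body shape as test_export_traces_with_composite_query_trace_operator; the base URL, token, and timestamps are placeholders and not part of this diff.

# Hypothetical standalone sketch of the POST export call with a trace operator.
# BASE_URL and TOKEN are placeholders; the body shape mirrors the test above.
import requests

BASE_URL = "http://localhost:8080"  # placeholder
TOKEN = "<access-token>"            # placeholder

body = {
    "start": 1_700_000_000_000_000_000,  # nanoseconds
    "end": 1_700_000_300_000_000_000,    # nanoseconds
    "requestType": "raw",
    "compositeQuery": {
        "queries": [
            {
                "type": "builder_query",
                "spec": {
                    "signal": "traces",
                    "name": "A",
                    "limit": 1000,
                    "filter": {"expression": "operation.type = 'parent'"},
                },
            },
            {
                "type": "builder_query",
                "spec": {
                    "signal": "traces",
                    "name": "B",
                    "limit": 1000,
                    "filter": {"expression": "operation.type = 'child'"},
                },
            },
            {
                "type": "builder_trace_operator",
                "spec": {
                    "name": "C",
                    "expression": "A => B",
                    "returnSpansFrom": "A",
                    "limit": 1000,
                },
            },
        ]
    },
}

response = requests.post(
    f"{BASE_URL}/api/v1/export_raw_data?format=jsonl",
    json=body,
    headers={"authorization": f"Bearer {TOKEN}", "Content-Type": "application/json"},
    timeout=10,
)
print(response.status_code, response.headers.get("Content-Type"))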