Mirror of https://github.com/SigNoz/signoz.git (synced 2026-03-24 05:10:27 +00:00)

Compare commits: 61 commits, nv/6204...feat/cloud
| Author | SHA1 | Date |
|---|---|---|
| | 42300bd0a0 | |
| | 6e52f2c8f0 | |
| | d9f8a4ae5a | |
| | eefe3edffd | |
| | 2051861a03 | |
| | 4b01a40fb9 | |
| | 2d8a00bf18 | |
| | f1b26b310f | |
| | 2c438b6c32 | |
| | 1814c2d13c | |
| | e6cd771f11 | |
| | 6b94f87ca0 | |
| | bf315253ae | |
| | 668ff7bc39 | |
| | 07f2aa52fd | |
| | 3416b3ad55 | |
| | d6caa4f2c7 | |
| | f86371566d | |
| | 9115803084 | |
| | 0c14d8f966 | |
| | 7afb461af8 | |
| | a21fbb4ee0 | |
| | 0369842f3d | |
| | 59cd96562a | |
| | cc4475cab7 | |
| | ac8c648420 | |
| | bede6be4b8 | |
| | dd3d60e6df | |
| | 538ab686d2 | |
| | 936a325cb9 | |
| | c6cdcd0143 | |
| | cd9211d718 | |
| | 0601c28782 | |
| | 580610dbfa | |
| | 2d2aa02a81 | |
| | dd9723ad13 | |
| | 3651469416 | |
| | febce75734 | |
| | e1616f3487 | |
| | 4b94287ac7 | |
| | 1575c7c54c | |
| | 8def3f835b | |
| | 11ed15f4c5 | |
| | f47877cca9 | |
| | bb2b9215ba | |
| | 3111904223 | |
| | 003e2c30d8 | |
| | 00fe516d10 | |
| | 0305f4f7db | |
| | c60019a6dc | |
| | acde2a37fa | |
| | 945241a52a | |
| | e967f80c86 | |
| | a09dc325de | |
| | 379b4f7fc4 | |
| | 5e536ae077 | |
| | 234585e642 | |
| | 2cc14f1ad4 | |
| | dc4ed4d239 | |
| | 7281c36873 | |
| | 40288776e8 | |
.vscode/settings.json (vendored), 4 lines changed

@@ -17,7 +17,5 @@
},
"[html]": {
"editor.defaultFormatter": "vscode.html-language-features"
},
"python-envs.defaultEnvManager": "ms-python.python:system",
"python-envs.pythonProjects": []
}
}

docs/api/openapi.yml, 1110 lines changed
File diff suppressed because it is too large.
@@ -123,7 +123,6 @@ if err := router.Handle("/api/v1/things", handler.New(
	Description: "This endpoint creates a thing",
	Request: new(types.PostableThing),
	RequestContentType: "application/json",
	RequestQuery: new(types.QueryableThing),
	Response: new(types.GettableThing),
	ResponseContentType: "application/json",
	SuccessStatusCode: http.StatusCreated,

@@ -156,8 +155,6 @@ The `handler.New` function ties the HTTP handler to OpenAPI metadata via `OpenAP
- **Request / RequestContentType**:
  - `Request` is a Go type that describes the request body or form.
  - `RequestContentType` is usually `"application/json"` or `"application/x-www-form-urlencoded"` (for callbacks like SAML).
- **RequestQuery**:
  - `RequestQuery` is a Go type that describes URL query parameters.
- **RequestExamples**: An array of `handler.OpenAPIExample` values that provide concrete request payloads in the generated spec. See [Adding request examples](#adding-request-examples) below.
- **Response / ResponseContentType**:
  - `Response` is the Go type for the successful response payload.
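To make the `Request`, `RequestQuery`, and `Response` fields above concrete, here is a minimal sketch of what the referenced Go types could look like. The struct names are taken from the snippet, but every field name and tag is an assumption for illustration only; the real definitions live in the repository's `types` package.

```go
package types

// PostableThing sketches a request-body type (illustrative fields only).
type PostableThing struct {
	Name        string `json:"name"`
	Description string `json:"description,omitempty"`
}

// QueryableThing sketches a RequestQuery type; the assumption here is that
// each exported field is surfaced as a URL query parameter in the spec.
type QueryableThing struct {
	Limit  int    `json:"limit,omitempty"`
	Cursor string `json:"cursor,omitempty"`
}

// GettableThing sketches the successful response payload.
type GettableThing struct {
	ID   string `json:"id"`
	Name string `json:"name"`
}
```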
@@ -273,7 +273,6 @@ Options can be simple (direct link) or nested (with another question):
- Place logo files in `public/Logos/`
- Use SVG format
- Reference as `"/Logos/your-logo.svg"`
- **Fetching Icons**: New icons can be easily fetched from [OpenBrand](https://openbrand.sh/). Use the pattern `https://openbrand.sh/?url=<TARGET_URL>`, where `<TARGET_URL>` is the URL-encoded link to the service's website. For example, to get Render's logo, use [https://openbrand.sh/?url=https%3A%2F%2Frender.com](https://openbrand.sh/?url=https%3A%2F%2Frender.com).
- **Optimize new SVGs**: Run any newly downloaded SVGs through an optimizer like [SVGOMG (svgo)](https://svgomg.net/) or use `npx svgo public/Logos/your-logo.svg` to minimise their size before committing.

### 4. Links
@@ -57,10 +57,6 @@ func (provider *provider) Start(ctx context.Context) error {
	return provider.openfgaServer.Start(ctx)
}

func (provider *provider) Healthy() <-chan struct{} {
	return provider.openfgaServer.Healthy()
}

func (provider *provider) Stop(ctx context.Context) error {
	return provider.openfgaServer.Stop(ctx)
}
@@ -16,6 +16,7 @@ type Server struct {
}

func NewOpenfgaServer(ctx context.Context, pkgAuthzService authz.AuthZ) (*Server, error) {

	return &Server{
		pkgAuthzService: pkgAuthzService,
	}, nil

@@ -25,10 +26,6 @@ func (server *Server) Start(ctx context.Context) error {
	return server.pkgAuthzService.Start(ctx)
}

func (server *Server) Healthy() <-chan struct{} {
	return server.pkgAuthzService.Healthy()
}

func (server *Server) Stop(ctx context.Context) error {
	return server.pkgAuthzService.Stop(ctx)
}
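Both hunks above drop a `Healthy() <-chan struct{}` method that simply delegates to the underlying service. As a hedged illustration of how such a readiness channel is typically consumed, the sketch below shows a caller blocking on it with a timeout; the assumption that the channel signals (or is closed) once the component is ready is mine, not taken from the SigNoz code.

```go
package lifecycle

import (
	"context"
	"fmt"
	"time"
)

// component mirrors the Start/Healthy/Stop shape seen in the hunks above.
type component interface {
	Start(ctx context.Context) error
	Healthy() <-chan struct{}
	Stop(ctx context.Context) error
}

// waitUntilHealthy blocks until the component signals readiness on its
// Healthy channel or until the timeout elapses, whichever comes first.
func waitUntilHealthy(c component, timeout time.Duration) error {
	select {
	case <-c.Healthy():
		return nil
	case <-time.After(timeout):
		return fmt.Errorf("component not healthy after %s", timeout)
	}
}
```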
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="#fa520f" viewBox="0 0 24 24"><title>Mistral AI</title><path d="M17.143 3.429v3.428h-3.429v3.429h-3.428V6.857H6.857V3.43H3.43v13.714H0v3.428h10.286v-3.428H6.857v-3.429h3.429v3.429h3.429v-3.429h3.428v3.429h-3.428v3.428H24v-3.428h-3.43V3.429z"/></svg>
Before: 294 B

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 120 120"><defs><linearGradient id="a" x1="0%" x2="100%" y1="0%" y2="100%"><stop offset="0%" stop-color="#ff4d4d"/><stop offset="100%" stop-color="#991b1b"/></linearGradient></defs><path fill="url(#a)" d="M60 10c-30 0-45 25-45 45s15 40 30 45v10h10v-10s5 2 10 0v10h10v-10c15-5 30-25 30-45S90 10 60 10"/><path fill="url(#a)" d="M20 45C5 40 0 50 5 60s15 5 20-5c3-7 0-10-5-10"/><path fill="url(#a)" d="M100 45c15-5 20 5 15 15s-15 5-20-5c-3-7 0-10 5-10"/><path stroke="#ff4d4d" stroke-linecap="round" stroke-width="3" d="M45 15Q35 5 30 8M75 15Q85 5 90 8"/><circle cx="45" cy="35" r="6" fill="#050810"/><circle cx="75" cy="35" r="6" fill="#050810"/><circle cx="46" cy="34" r="2.5" fill="#00e5cc"/><circle cx="76" cy="34" r="2.5" fill="#00e5cc"/></svg>
Before: 809 B

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><title>Render</title><path d="M18.263.007c-3.121-.147-5.744 2.109-6.192 5.082-.018.138-.045.272-.067.405-.696 3.703-3.936 6.507-7.827 6.507a7.9 7.9 0 0 1-3.825-.979.202.202 0 0 0-.302.178V24H12v-8.999c0-1.656 1.338-3 2.987-3h2.988c3.382 0 6.103-2.817 5.97-6.244-.12-3.084-2.61-5.603-5.682-5.75"/></svg>
Before: 362 B
@@ -1,250 +0,0 @@
|
||||
/**
|
||||
* ! Do not edit manually
|
||||
* * The file has been auto-generated using Orval for SigNoz
|
||||
* * regenerate with 'yarn generate:api'
|
||||
* SigNoz
|
||||
*/
|
||||
import type {
|
||||
InvalidateOptions,
|
||||
QueryClient,
|
||||
QueryFunction,
|
||||
QueryKey,
|
||||
UseQueryOptions,
|
||||
UseQueryResult,
|
||||
} from 'react-query';
|
||||
import { useQuery } from 'react-query';
|
||||
|
||||
import type { ErrorType } from '../../../generatedAPIInstance';
|
||||
import { GeneratedAPIInstance } from '../../../generatedAPIInstance';
|
||||
import type {
|
||||
Healthz200,
|
||||
Healthz503,
|
||||
Livez200,
|
||||
Readyz200,
|
||||
Readyz503,
|
||||
RenderErrorResponseDTO,
|
||||
} from '../sigNoz.schemas';
|
||||
|
||||
/**
|
||||
* @summary Health check
|
||||
*/
|
||||
export const healthz = (signal?: AbortSignal) => {
|
||||
return GeneratedAPIInstance<Healthz200>({
|
||||
url: `/api/v2/healthz`,
|
||||
method: 'GET',
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getHealthzQueryKey = () => {
|
||||
return [`/api/v2/healthz`] as const;
|
||||
};
|
||||
|
||||
export const getHealthzQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof healthz>>,
|
||||
TError = ErrorType<Healthz503>
|
||||
>(options?: {
|
||||
query?: UseQueryOptions<Awaited<ReturnType<typeof healthz>>, TError, TData>;
|
||||
}) => {
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey = queryOptions?.queryKey ?? getHealthzQueryKey();
|
||||
|
||||
const queryFn: QueryFunction<Awaited<ReturnType<typeof healthz>>> = ({
|
||||
signal,
|
||||
}) => healthz(signal);
|
||||
|
||||
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
|
||||
Awaited<ReturnType<typeof healthz>>,
|
||||
TError,
|
||||
TData
|
||||
> & { queryKey: QueryKey };
|
||||
};
|
||||
|
||||
export type HealthzQueryResult = NonNullable<
|
||||
Awaited<ReturnType<typeof healthz>>
|
||||
>;
|
||||
export type HealthzQueryError = ErrorType<Healthz503>;
|
||||
|
||||
/**
|
||||
* @summary Health check
|
||||
*/
|
||||
|
||||
export function useHealthz<
|
||||
TData = Awaited<ReturnType<typeof healthz>>,
|
||||
TError = ErrorType<Healthz503>
|
||||
>(options?: {
|
||||
query?: UseQueryOptions<Awaited<ReturnType<typeof healthz>>, TError, TData>;
|
||||
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getHealthzQueryOptions(options);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
};
|
||||
|
||||
query.queryKey = queryOptions.queryKey;
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Health check
|
||||
*/
|
||||
export const invalidateHealthz = async (
|
||||
queryClient: QueryClient,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
{ queryKey: getHealthzQueryKey() },
|
||||
options,
|
||||
);
|
||||
|
||||
return queryClient;
|
||||
};
|
||||
|
||||
/**
|
||||
* @summary Liveness check
|
||||
*/
|
||||
export const livez = (signal?: AbortSignal) => {
|
||||
return GeneratedAPIInstance<Livez200>({
|
||||
url: `/api/v2/livez`,
|
||||
method: 'GET',
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getLivezQueryKey = () => {
|
||||
return [`/api/v2/livez`] as const;
|
||||
};
|
||||
|
||||
export const getLivezQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof livez>>,
|
||||
TError = ErrorType<RenderErrorResponseDTO>
|
||||
>(options?: {
|
||||
query?: UseQueryOptions<Awaited<ReturnType<typeof livez>>, TError, TData>;
|
||||
}) => {
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey = queryOptions?.queryKey ?? getLivezQueryKey();
|
||||
|
||||
const queryFn: QueryFunction<Awaited<ReturnType<typeof livez>>> = ({
|
||||
signal,
|
||||
}) => livez(signal);
|
||||
|
||||
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
|
||||
Awaited<ReturnType<typeof livez>>,
|
||||
TError,
|
||||
TData
|
||||
> & { queryKey: QueryKey };
|
||||
};
|
||||
|
||||
export type LivezQueryResult = NonNullable<Awaited<ReturnType<typeof livez>>>;
|
||||
export type LivezQueryError = ErrorType<RenderErrorResponseDTO>;
|
||||
|
||||
/**
|
||||
* @summary Liveness check
|
||||
*/
|
||||
|
||||
export function useLivez<
|
||||
TData = Awaited<ReturnType<typeof livez>>,
|
||||
TError = ErrorType<RenderErrorResponseDTO>
|
||||
>(options?: {
|
||||
query?: UseQueryOptions<Awaited<ReturnType<typeof livez>>, TError, TData>;
|
||||
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getLivezQueryOptions(options);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
};
|
||||
|
||||
query.queryKey = queryOptions.queryKey;
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Liveness check
|
||||
*/
|
||||
export const invalidateLivez = async (
|
||||
queryClient: QueryClient,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries({ queryKey: getLivezQueryKey() }, options);
|
||||
|
||||
return queryClient;
|
||||
};
|
||||
|
||||
/**
|
||||
* @summary Readiness check
|
||||
*/
|
||||
export const readyz = (signal?: AbortSignal) => {
|
||||
return GeneratedAPIInstance<Readyz200>({
|
||||
url: `/api/v2/readyz`,
|
||||
method: 'GET',
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getReadyzQueryKey = () => {
|
||||
return [`/api/v2/readyz`] as const;
|
||||
};
|
||||
|
||||
export const getReadyzQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof readyz>>,
|
||||
TError = ErrorType<Readyz503>
|
||||
>(options?: {
|
||||
query?: UseQueryOptions<Awaited<ReturnType<typeof readyz>>, TError, TData>;
|
||||
}) => {
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey = queryOptions?.queryKey ?? getReadyzQueryKey();
|
||||
|
||||
const queryFn: QueryFunction<Awaited<ReturnType<typeof readyz>>> = ({
|
||||
signal,
|
||||
}) => readyz(signal);
|
||||
|
||||
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
|
||||
Awaited<ReturnType<typeof readyz>>,
|
||||
TError,
|
||||
TData
|
||||
> & { queryKey: QueryKey };
|
||||
};
|
||||
|
||||
export type ReadyzQueryResult = NonNullable<Awaited<ReturnType<typeof readyz>>>;
|
||||
export type ReadyzQueryError = ErrorType<Readyz503>;
|
||||
|
||||
/**
|
||||
* @summary Readiness check
|
||||
*/
|
||||
|
||||
export function useReadyz<
|
||||
TData = Awaited<ReturnType<typeof readyz>>,
|
||||
TError = ErrorType<Readyz503>
|
||||
>(options?: {
|
||||
query?: UseQueryOptions<Awaited<ReturnType<typeof readyz>>, TError, TData>;
|
||||
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getReadyzQueryOptions(options);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
};
|
||||
|
||||
query.queryKey = queryOptions.queryKey;
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Readiness check
|
||||
*/
|
||||
export const invalidateReadyz = async (
|
||||
queryClient: QueryClient,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
{ queryKey: getReadyzQueryKey() },
|
||||
options,
|
||||
);
|
||||
|
||||
return queryClient;
|
||||
};
|
||||
@@ -20,113 +20,11 @@ import { useMutation, useQuery } from 'react-query';
|
||||
import type { BodyType, ErrorType } from '../../../generatedAPIInstance';
|
||||
import { GeneratedAPIInstance } from '../../../generatedAPIInstance';
|
||||
import type {
|
||||
HandleExportRawDataPOSTParams,
|
||||
ListPromotedAndIndexedPaths200,
|
||||
PromotetypesPromotePathDTO,
|
||||
Querybuildertypesv5QueryRangeRequestDTO,
|
||||
RenderErrorResponseDTO,
|
||||
} from '../sigNoz.schemas';
|
||||
|
||||
/**
|
||||
* This endpoint allows complex queries for exporting raw data for traces and logs
|
||||
* @summary Export raw data
|
||||
*/
|
||||
export const handleExportRawDataPOST = (
|
||||
querybuildertypesv5QueryRangeRequestDTO: BodyType<Querybuildertypesv5QueryRangeRequestDTO>,
|
||||
params?: HandleExportRawDataPOSTParams,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<string>({
|
||||
url: `/api/v1/export_raw_data`,
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
data: querybuildertypesv5QueryRangeRequestDTO,
|
||||
params,
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getHandleExportRawDataPOSTMutationOptions = <
|
||||
TError = ErrorType<RenderErrorResponseDTO>,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
|
||||
TError,
|
||||
{
|
||||
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
|
||||
params?: HandleExportRawDataPOSTParams;
|
||||
},
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
|
||||
TError,
|
||||
{
|
||||
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
|
||||
params?: HandleExportRawDataPOSTParams;
|
||||
},
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['handleExportRawDataPOST'];
|
||||
const { mutation: mutationOptions } = options
|
||||
? options.mutation &&
|
||||
'mutationKey' in options.mutation &&
|
||||
options.mutation.mutationKey
|
||||
? options
|
||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
||||
: { mutation: { mutationKey } };
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
|
||||
{
|
||||
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
|
||||
params?: HandleExportRawDataPOSTParams;
|
||||
}
|
||||
> = (props) => {
|
||||
const { data, params } = props ?? {};
|
||||
|
||||
return handleExportRawDataPOST(data, params);
|
||||
};
|
||||
|
||||
return { mutationFn, ...mutationOptions };
|
||||
};
|
||||
|
||||
export type HandleExportRawDataPOSTMutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof handleExportRawDataPOST>>
|
||||
>;
|
||||
export type HandleExportRawDataPOSTMutationBody = BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
|
||||
export type HandleExportRawDataPOSTMutationError = ErrorType<RenderErrorResponseDTO>;
|
||||
|
||||
/**
|
||||
* @summary Export raw data
|
||||
*/
|
||||
export const useHandleExportRawDataPOST = <
|
||||
TError = ErrorType<RenderErrorResponseDTO>,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
|
||||
TError,
|
||||
{
|
||||
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
|
||||
params?: HandleExportRawDataPOSTParams;
|
||||
},
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
|
||||
TError,
|
||||
{
|
||||
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
|
||||
params?: HandleExportRawDataPOSTParams;
|
||||
},
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getHandleExportRawDataPOSTMutationOptions(options);
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
/**
|
||||
* This endpoint promotes and indexes paths
|
||||
* @summary Promote and index paths
|
||||
|
||||
@@ -543,23 +543,6 @@ export interface ErrorsResponseerroradditionalDTO {
|
||||
message?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* @nullable
|
||||
*/
|
||||
export type FactoryResponseDTOServices = { [key: string]: string[] } | null;
|
||||
|
||||
export interface FactoryResponseDTO {
|
||||
/**
|
||||
* @type boolean
|
||||
*/
|
||||
healthy?: boolean;
|
||||
/**
|
||||
* @type object
|
||||
* @nullable true
|
||||
*/
|
||||
services?: FactoryResponseDTOServices;
|
||||
}
|
||||
|
||||
/**
|
||||
* @nullable
|
||||
*/
|
||||
@@ -2959,19 +2942,6 @@ export type DeleteAuthDomainPathParameters = {
|
||||
export type UpdateAuthDomainPathParameters = {
|
||||
id: string;
|
||||
};
|
||||
export type HandleExportRawDataPOSTParams = {
|
||||
/**
|
||||
* @enum csv,jsonl
|
||||
* @type string
|
||||
* @description The output format for the export.
|
||||
*/
|
||||
format?: HandleExportRawDataPOSTFormat;
|
||||
};
|
||||
|
||||
export enum HandleExportRawDataPOSTFormat {
|
||||
csv = 'csv',
|
||||
jsonl = 'jsonl',
|
||||
}
|
||||
export type GetFieldsKeysParams = {
|
||||
/**
|
||||
* @description undefined
|
||||
@@ -3487,30 +3457,6 @@ export type SearchIngestionKeys200 = {
|
||||
status: string;
|
||||
};
|
||||
|
||||
export type Healthz200 = {
|
||||
data: FactoryResponseDTO;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
status: string;
|
||||
};
|
||||
|
||||
export type Healthz503 = {
|
||||
data: FactoryResponseDTO;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
status: string;
|
||||
};
|
||||
|
||||
export type Livez200 = {
|
||||
data: FactoryResponseDTO;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
status: string;
|
||||
};
|
||||
|
||||
export type ListMetricsParams = {
|
||||
/**
|
||||
* @type integer
|
||||
@@ -3646,22 +3592,6 @@ export type GetMyOrganization200 = {
|
||||
status: string;
|
||||
};
|
||||
|
||||
export type Readyz200 = {
|
||||
data: FactoryResponseDTO;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
status: string;
|
||||
};
|
||||
|
||||
export type Readyz503 = {
|
||||
data: FactoryResponseDTO;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
status: string;
|
||||
};
|
||||
|
||||
export type GetSessionContext200 = {
|
||||
data: AuthtypesSessionContextDTO;
|
||||
/**
|
||||
|
||||
@@ -7,7 +7,7 @@ import ROUTES from 'constants/routes';
|
||||
import useUpdatedQuery from 'container/GridCardLayout/useResolveQuery';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { useDashboardStore } from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { Query, TagFilterItem } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource, MetricAggregateOperator } from 'types/common/queryBuilder';
|
||||
@@ -79,7 +79,7 @@ export function useNavigateToExplorer(): (
|
||||
);
|
||||
|
||||
const { getUpdatedQuery } = useUpdatedQuery();
|
||||
const { selectedDashboard } = useDashboardStore();
|
||||
const { selectedDashboard } = useDashboard();
|
||||
const { notifications } = useNotifications();
|
||||
|
||||
return useCallback(
|
||||
|
||||
@@ -86,8 +86,8 @@ jest.mock('hooks/useDarkMode', () => ({
|
||||
useIsDarkMode: (): boolean => false,
|
||||
}));
|
||||
|
||||
jest.mock('providers/Dashboard/store/useDashboardStore', () => ({
|
||||
useDashboardStore: (): { selectedDashboard: undefined } => ({
|
||||
jest.mock('providers/Dashboard/Dashboard', () => ({
|
||||
useDashboard: (): { selectedDashboard: undefined } => ({
|
||||
selectedDashboard: undefined,
|
||||
}),
|
||||
}));
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
import { ReactNode } from 'react';
|
||||
import { MemoryRouter, useLocation } from 'react-router-dom';
|
||||
import { useDashboardBootstrap } from 'hooks/dashboard/useDashboardBootstrap';
|
||||
import {
|
||||
getDashboardById,
|
||||
getNonIntegrationDashboardById,
|
||||
@@ -8,9 +6,10 @@ import {
|
||||
import { server } from 'mocks-server/server';
|
||||
import { rest } from 'msw';
|
||||
import {
|
||||
resetDashboard,
|
||||
useDashboardStore,
|
||||
} from 'providers/Dashboard/store/useDashboardStore';
|
||||
DashboardContext,
|
||||
DashboardProvider,
|
||||
} from 'providers/Dashboard/Dashboard';
|
||||
import { IDashboardContext } from 'providers/Dashboard/types';
|
||||
import {
|
||||
fireEvent,
|
||||
render,
|
||||
@@ -22,18 +21,6 @@ import { Dashboard } from 'types/api/dashboard/getAll';
|
||||
|
||||
import DashboardDescription from '..';
|
||||
|
||||
function DashboardBootstrapWrapper({
|
||||
dashboardId,
|
||||
children,
|
||||
}: {
|
||||
dashboardId: string;
|
||||
children: ReactNode;
|
||||
}): JSX.Element {
|
||||
useDashboardBootstrap(dashboardId);
|
||||
// eslint-disable-next-line react/jsx-no-useless-fragment
|
||||
return <>{children}</>;
|
||||
}
|
||||
|
||||
interface MockSafeNavigateReturn {
|
||||
safeNavigate: jest.MockedFunction<(url: string) => void>;
|
||||
}
|
||||
@@ -67,7 +54,6 @@ describe('Dashboard landing page actions header tests', () => {
|
||||
beforeEach(() => {
|
||||
mockSafeNavigate.mockClear();
|
||||
sessionStorage.clear();
|
||||
resetDashboard();
|
||||
});
|
||||
|
||||
it('unlock dashboard should be disabled for integrations created dashboards', async () => {
|
||||
@@ -78,7 +64,7 @@ describe('Dashboard landing page actions header tests', () => {
|
||||
(useLocation as jest.Mock).mockReturnValue(mockLocation);
|
||||
const { getByTestId } = render(
|
||||
<MemoryRouter initialEntries={[DASHBOARD_PATH]}>
|
||||
<DashboardBootstrapWrapper dashboardId="4">
|
||||
<DashboardProvider dashboardId="4">
|
||||
<DashboardDescription
|
||||
handle={{
|
||||
active: false,
|
||||
@@ -87,7 +73,7 @@ describe('Dashboard landing page actions header tests', () => {
|
||||
node: { current: null },
|
||||
}}
|
||||
/>
|
||||
</DashboardBootstrapWrapper>
|
||||
</DashboardProvider>
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
@@ -119,7 +105,7 @@ describe('Dashboard landing page actions header tests', () => {
|
||||
);
|
||||
const { getByTestId } = render(
|
||||
<MemoryRouter initialEntries={[DASHBOARD_PATH]}>
|
||||
<DashboardBootstrapWrapper dashboardId="4">
|
||||
<DashboardProvider dashboardId="4">
|
||||
<DashboardDescription
|
||||
handle={{
|
||||
active: false,
|
||||
@@ -128,7 +114,7 @@ describe('Dashboard landing page actions header tests', () => {
|
||||
node: { current: null },
|
||||
}}
|
||||
/>
|
||||
</DashboardBootstrapWrapper>
|
||||
</DashboardProvider>
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
@@ -158,7 +144,7 @@ describe('Dashboard landing page actions header tests', () => {
|
||||
|
||||
const { getByText } = render(
|
||||
<MemoryRouter initialEntries={[DASHBOARD_PATH]}>
|
||||
<DashboardBootstrapWrapper dashboardId="4">
|
||||
<DashboardProvider dashboardId="4">
|
||||
<DashboardDescription
|
||||
handle={{
|
||||
active: false,
|
||||
@@ -167,7 +153,7 @@ describe('Dashboard landing page actions header tests', () => {
|
||||
node: { current: null },
|
||||
}}
|
||||
/>
|
||||
</DashboardBootstrapWrapper>
|
||||
</DashboardProvider>
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
@@ -195,26 +181,37 @@ describe('Dashboard landing page actions header tests', () => {
|
||||
|
||||
(useLocation as jest.Mock).mockReturnValue(mockLocation);
|
||||
|
||||
useDashboardStore.setState({
|
||||
const mockContextValue: IDashboardContext = {
|
||||
isDashboardLocked: false,
|
||||
handleDashboardLockToggle: jest.fn(),
|
||||
dashboardResponse: {} as IDashboardContext['dashboardResponse'],
|
||||
selectedDashboard: (getDashboardById.data as unknown) as Dashboard,
|
||||
layouts: [],
|
||||
panelMap: {},
|
||||
setPanelMap: jest.fn(),
|
||||
setLayouts: jest.fn(),
|
||||
setSelectedDashboard: jest.fn(),
|
||||
updatedTimeRef: { current: null },
|
||||
updateLocalStorageDashboardVariables: jest.fn(),
|
||||
dashboardQueryRangeCalled: false,
|
||||
setDashboardQueryRangeCalled: jest.fn(),
|
||||
isDashboardFetching: false,
|
||||
columnWidths: {},
|
||||
});
|
||||
setColumnWidths: jest.fn(),
|
||||
};
|
||||
|
||||
const { getByText } = render(
|
||||
<MemoryRouter initialEntries={[DASHBOARD_PATH]}>
|
||||
<DashboardDescription
|
||||
handle={{
|
||||
active: false,
|
||||
enter: (): Promise<void> => Promise.resolve(),
|
||||
exit: (): Promise<void> => Promise.resolve(),
|
||||
node: { current: null },
|
||||
}}
|
||||
/>
|
||||
<DashboardContext.Provider value={mockContextValue}>
|
||||
<DashboardDescription
|
||||
handle={{
|
||||
active: false,
|
||||
enter: (): Promise<void> => Promise.resolve(),
|
||||
exit: (): Promise<void> => Promise.resolve(),
|
||||
node: { current: null },
|
||||
}}
|
||||
/>
|
||||
</DashboardContext.Provider>
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
|
||||
@@ -21,7 +21,6 @@ import { DeleteButton } from 'container/ListOfDashboard/TableComponents/DeleteBu
|
||||
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
|
||||
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
|
||||
import { useGetPublicDashboardMeta } from 'hooks/dashboard/useGetPublicDashboardMeta';
|
||||
import { useLockDashboard } from 'hooks/dashboard/useLockDashboard';
|
||||
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
|
||||
import useComponentPermission from 'hooks/useComponentPermission';
|
||||
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
|
||||
@@ -40,11 +39,8 @@ import {
|
||||
X,
|
||||
} from 'lucide-react';
|
||||
import { useAppContext } from 'providers/App/App';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { usePanelTypeSelectionModalStore } from 'providers/Dashboard/helpers/panelTypeSelectionModalHelper';
|
||||
import {
|
||||
selectIsDashboardLocked,
|
||||
useDashboardStore,
|
||||
} from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { sortLayout } from 'providers/Dashboard/util';
|
||||
import { DashboardData } from 'types/api/dashboard/getAll';
|
||||
import { Props } from 'types/api/dashboard/update';
|
||||
@@ -83,11 +79,10 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
|
||||
setPanelMap,
|
||||
layouts,
|
||||
setLayouts,
|
||||
isDashboardLocked,
|
||||
setSelectedDashboard,
|
||||
} = useDashboardStore();
|
||||
|
||||
const isDashboardLocked = useDashboardStore(selectIsDashboardLocked);
|
||||
const handleDashboardLockToggle = useLockDashboard();
|
||||
handleDashboardLockToggle,
|
||||
} = useDashboard();
|
||||
|
||||
const variablesSettingsTabHandle = useRef<VariablesSettingsTab>(null);
|
||||
const [isSettingsDrawerOpen, setIsSettingsDrawerOpen] = useState<boolean>(
|
||||
|
||||
@@ -30,7 +30,7 @@ import {
|
||||
Pyramid,
|
||||
X,
|
||||
} from 'lucide-react';
|
||||
import { useDashboardStore } from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { AppState } from 'store/reducers';
|
||||
import {
|
||||
IDashboardVariable,
|
||||
@@ -239,7 +239,7 @@ function VariableItem({
|
||||
|
||||
const [selectedWidgets, setSelectedWidgets] = useState<string[]>([]);
|
||||
|
||||
const { selectedDashboard } = useDashboardStore();
|
||||
const { selectedDashboard } = useDashboard();
|
||||
const widgetsByDynamicVariableId = useWidgetsByDynamicVariableId();
|
||||
|
||||
useEffect(() => {
|
||||
|
||||
@@ -2,7 +2,7 @@ import React from 'react';
|
||||
import { CustomMultiSelect } from 'components/NewSelect';
|
||||
import { PANEL_GROUP_TYPES } from 'constants/queryBuilder';
|
||||
import { generateGridTitle } from 'container/GridPanelSwitch/utils';
|
||||
import { useDashboardStore } from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { WidgetRow, Widgets } from 'types/api/dashboard/getAll';
|
||||
|
||||
export function WidgetSelector({
|
||||
@@ -12,7 +12,7 @@ export function WidgetSelector({
|
||||
selectedWidgets: string[];
|
||||
setSelectedWidgets: (widgets: string[]) => void;
|
||||
}): JSX.Element {
|
||||
const { selectedDashboard } = useDashboardStore();
|
||||
const { selectedDashboard } = useDashboard();
|
||||
|
||||
// Get layout IDs for cross-referencing
|
||||
const layoutIds = new Set(
|
||||
|
||||
@@ -19,8 +19,8 @@ import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
|
||||
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { PenLine, Trash2 } from 'lucide-react';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { IDashboardVariables } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
|
||||
import { useDashboardStore } from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
|
||||
import { TVariableMode } from './types';
|
||||
@@ -87,7 +87,7 @@ function VariablesSettings({
|
||||
|
||||
const { t } = useTranslation(['dashboard']);
|
||||
|
||||
const { selectedDashboard, setSelectedDashboard } = useDashboardStore();
|
||||
const { selectedDashboard, setSelectedDashboard } = useDashboard();
|
||||
const { dashboardVariables } = useDashboardVariables();
|
||||
|
||||
const { notifications } = useNotifications();
|
||||
|
||||
@@ -5,7 +5,7 @@ import AddTags from 'container/DashboardContainer/DashboardSettings/General/AddT
|
||||
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
|
||||
import { isEqual } from 'lodash-es';
|
||||
import { Check, X } from 'lucide-react';
|
||||
import { useDashboardStore } from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
|
||||
import { Button } from './styles';
|
||||
import { Base64Icons } from './utils';
|
||||
@@ -15,7 +15,7 @@ import './GeneralSettings.styles.scss';
|
||||
const { Option } = Select;
|
||||
|
||||
function GeneralDashboardSettings(): JSX.Element {
|
||||
const { selectedDashboard, setSelectedDashboard } = useDashboardStore();
|
||||
const { selectedDashboard, setSelectedDashboard } = useDashboard();
|
||||
|
||||
const updateDashboardMutation = useUpdateDashboard();
|
||||
|
||||
|
||||
@@ -7,14 +7,14 @@ import {
|
||||
unpublishedPublicDashboardMeta,
|
||||
} from 'mocks-server/__mockdata__/publicDashboard';
|
||||
import { rest, server } from 'mocks-server/server';
|
||||
import { useDashboardStore } from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
|
||||
import { USER_ROLES } from 'types/roles';
|
||||
|
||||
import PublicDashboardSetting from '../index';
|
||||
|
||||
// Mock dependencies
|
||||
jest.mock('providers/Dashboard/store/useDashboardStore');
|
||||
jest.mock('providers/Dashboard/Dashboard');
|
||||
jest.mock('react-use', () => ({
|
||||
...jest.requireActual('react-use'),
|
||||
useCopyToClipboard: jest.fn(),
|
||||
@@ -26,7 +26,7 @@ jest.mock('@signozhq/sonner', () => ({
|
||||
},
|
||||
}));
|
||||
|
||||
const mockUseDashboard = jest.mocked(useDashboardStore);
|
||||
const mockUseDashboard = jest.mocked(useDashboard);
|
||||
const mockUseCopyToClipboard = jest.mocked(useCopyToClipboard);
|
||||
const mockToast = jest.mocked(toast);
|
||||
|
||||
@@ -67,10 +67,10 @@ beforeEach(() => {
|
||||
// Mock window.open
|
||||
window.open = jest.fn();
|
||||
|
||||
// Mock useDashboardStore
|
||||
// Mock useDashboard
|
||||
mockUseDashboard.mockReturnValue(({
|
||||
selectedDashboard: mockSelectedDashboard,
|
||||
} as unknown) as ReturnType<typeof useDashboardStore>);
|
||||
} as unknown) as ReturnType<typeof useDashboard>);
|
||||
|
||||
// Mock useCopyToClipboard
|
||||
mockUseCopyToClipboard.mockReturnValue(([
|
||||
|
||||
@@ -11,7 +11,7 @@ import { useGetPublicDashboardMeta } from 'hooks/dashboard/useGetPublicDashboard
|
||||
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
|
||||
import { Copy, ExternalLink, Globe, Info, Loader2, Trash } from 'lucide-react';
|
||||
import { useAppContext } from 'providers/App/App';
|
||||
import { useDashboardStore } from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { PublicDashboardMetaProps } from 'types/api/dashboard/public/getMeta';
|
||||
import APIError from 'types/api/error';
|
||||
import { USER_ROLES } from 'types/roles';
|
||||
@@ -59,7 +59,7 @@ function PublicDashboardSetting(): JSX.Element {
|
||||
const [defaultTimeRange, setDefaultTimeRange] = useState('30m');
|
||||
const [, setCopyPublicDashboardURL] = useCopyToClipboard();
|
||||
|
||||
const { selectedDashboard } = useDashboardStore();
|
||||
const { selectedDashboard } = useDashboard();
|
||||
|
||||
const { isCloudUser, isEnterpriseSelfHostedUser } = useGetTenantLicense();
|
||||
|
||||
|
||||
@@ -3,14 +3,13 @@ import { memo, useCallback, useEffect, useMemo } from 'react';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { Row } from 'antd';
|
||||
import { ALL_SELECTED_VALUE } from 'components/NewSelect/utils';
|
||||
import { updateLocalStorageDashboardVariable } from 'hooks/dashboard/useDashboardFromLocalStorage';
|
||||
import {
|
||||
useDashboardVariables,
|
||||
useDashboardVariablesSelector,
|
||||
} from 'hooks/dashboard/useDashboardVariables';
|
||||
import useVariablesFromUrl from 'hooks/dashboard/useVariablesFromUrl';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { updateDashboardVariablesStore } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStore';
|
||||
import { useDashboardStore } from 'providers/Dashboard/store/useDashboardStore';
|
||||
import {
|
||||
enqueueDescendantsOfVariable,
|
||||
enqueueFetchOfAllVariables,
|
||||
@@ -19,23 +18,23 @@ import {
|
||||
import { AppState } from 'store/reducers';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
import { useShallow } from 'zustand/react/shallow';
|
||||
|
||||
import VariableItem from './VariableItem';
|
||||
|
||||
import './DashboardVariableSelection.styles.scss';
|
||||
|
||||
function DashboardVariableSelection(): JSX.Element | null {
|
||||
const { dashboardId, setSelectedDashboard } = useDashboardStore(
|
||||
useShallow((s) => ({
|
||||
dashboardId: s.selectedDashboard?.id ?? '',
|
||||
setSelectedDashboard: s.setSelectedDashboard,
|
||||
})),
|
||||
);
|
||||
const {
|
||||
setSelectedDashboard,
|
||||
updateLocalStorageDashboardVariables,
|
||||
} = useDashboard();
|
||||
|
||||
const { updateUrlVariable } = useVariablesFromUrl();
|
||||
|
||||
const { dashboardVariables } = useDashboardVariables();
|
||||
const dashboardId = useDashboardVariablesSelector(
|
||||
(state) => state.dashboardId,
|
||||
);
|
||||
const sortedVariablesArray = useDashboardVariablesSelector(
|
||||
(state) => state.sortedVariablesArray,
|
||||
);
|
||||
@@ -83,13 +82,7 @@ function DashboardVariableSelection(): JSX.Element | null {
|
||||
// This makes localStorage much lighter by avoiding storing all individual values
|
||||
const variable = dashboardVariables[id] || dashboardVariables[name];
|
||||
const isDynamic = variable.type === 'DYNAMIC';
|
||||
updateLocalStorageDashboardVariable(
|
||||
dashboardId,
|
||||
name,
|
||||
value,
|
||||
allSelected,
|
||||
isDynamic,
|
||||
);
|
||||
updateLocalStorageDashboardVariables(name, value, allSelected, isDynamic);
|
||||
|
||||
if (allSelected) {
|
||||
updateUrlVariable(name || id, ALL_SELECTED_VALUE);
|
||||
@@ -157,7 +150,13 @@ function DashboardVariableSelection(): JSX.Element | null {
|
||||
// Safe to call synchronously now that the store already has the updated value.
|
||||
enqueueDescendantsOfVariable(name);
|
||||
},
|
||||
[dashboardId, dashboardVariables, updateUrlVariable, setSelectedDashboard],
|
||||
[
|
||||
dashboardId,
|
||||
dashboardVariables,
|
||||
updateLocalStorageDashboardVariables,
|
||||
updateUrlVariable,
|
||||
setSelectedDashboard,
|
||||
],
|
||||
);
|
||||
|
||||
return (
|
||||
|
||||
@@ -32,22 +32,11 @@ const mockVariableItemCallbacks: {
|
||||
// Mock providers/Dashboard/Dashboard
|
||||
const mockSetSelectedDashboard = jest.fn();
|
||||
const mockUpdateLocalStorageDashboardVariables = jest.fn();
|
||||
interface MockDashboardStoreState {
|
||||
selectedDashboard?: { id: string };
|
||||
setSelectedDashboard: typeof mockSetSelectedDashboard;
|
||||
updateLocalStorageDashboardVariables: typeof mockUpdateLocalStorageDashboardVariables;
|
||||
}
|
||||
jest.mock('providers/Dashboard/store/useDashboardStore', () => ({
|
||||
useDashboardStore: (
|
||||
selector?: (s: Record<string, unknown>) => MockDashboardStoreState,
|
||||
): MockDashboardStoreState => {
|
||||
const state = {
|
||||
selectedDashboard: { id: 'dash-1' },
|
||||
setSelectedDashboard: mockSetSelectedDashboard,
|
||||
updateLocalStorageDashboardVariables: mockUpdateLocalStorageDashboardVariables,
|
||||
};
|
||||
return selector ? selector(state) : state;
|
||||
},
|
||||
jest.mock('providers/Dashboard/Dashboard', () => ({
|
||||
useDashboard: (): Record<string, unknown> => ({
|
||||
setSelectedDashboard: mockSetSelectedDashboard,
|
||||
updateLocalStorageDashboardVariables: mockUpdateLocalStorageDashboardVariables,
|
||||
}),
|
||||
}));
|
||||
|
||||
// Mock hooks/dashboard/useVariablesFromUrl
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
/* eslint-disable sonarjs/cognitive-complexity */
|
||||
import { useCallback } from 'react';
|
||||
import { useAddDynamicVariableToPanels } from 'hooks/dashboard/useAddDynamicVariableToPanels';
|
||||
import { updateLocalStorageDashboardVariable } from 'hooks/dashboard/useDashboardFromLocalStorage';
|
||||
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { IDashboardVariables } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
|
||||
import { useDashboardStore } from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import { useShallow } from 'zustand/react/shallow';
|
||||
|
||||
import { convertVariablesToDbFormat } from './util';
|
||||
|
||||
@@ -39,16 +37,11 @@ interface UseDashboardVariableUpdateReturn {
|
||||
|
||||
export const useDashboardVariableUpdate = (): UseDashboardVariableUpdateReturn => {
|
||||
const {
|
||||
dashboardId,
|
||||
selectedDashboard,
|
||||
setSelectedDashboard,
|
||||
} = useDashboardStore(
|
||||
useShallow((s) => ({
|
||||
dashboardId: s.selectedDashboard?.id ?? '',
|
||||
selectedDashboard: s.selectedDashboard,
|
||||
setSelectedDashboard: s.setSelectedDashboard,
|
||||
})),
|
||||
);
|
||||
updateLocalStorageDashboardVariables,
|
||||
} = useDashboard();
|
||||
|
||||
const addDynamicVariableToPanels = useAddDynamicVariableToPanels();
|
||||
const updateMutation = useUpdateDashboard();
|
||||
|
||||
@@ -66,13 +59,7 @@ export const useDashboardVariableUpdate = (): UseDashboardVariableUpdateReturn =
|
||||
// This makes localStorage much lighter and more efficient.
|
||||
// currently all the variables are dynamic
|
||||
const isDynamic = true;
|
||||
updateLocalStorageDashboardVariable(
|
||||
dashboardId,
|
||||
name,
|
||||
value,
|
||||
allSelected,
|
||||
isDynamic,
|
||||
);
|
||||
updateLocalStorageDashboardVariables(name, value, allSelected, isDynamic);
|
||||
|
||||
if (selectedDashboard) {
|
||||
setSelectedDashboard((prev) => {
|
||||
@@ -110,7 +97,11 @@ export const useDashboardVariableUpdate = (): UseDashboardVariableUpdateReturn =
|
||||
}
|
||||
}
|
||||
},
|
||||
[dashboardId, selectedDashboard, setSelectedDashboard],
|
||||
[
|
||||
selectedDashboard,
|
||||
setSelectedDashboard,
|
||||
updateLocalStorageDashboardVariables,
|
||||
],
|
||||
);
|
||||
|
||||
const updateVariables = useCallback(
|
||||
|
||||
@@ -49,8 +49,8 @@ const mockDashboard = {
|
||||
// Mock the dashboard provider with stable functions to prevent infinite loops
|
||||
const mockSetSelectedDashboard = jest.fn();
|
||||
const mockUpdateLocalStorageDashboardVariables = jest.fn();
|
||||
jest.mock('providers/Dashboard/store/useDashboardStore', () => ({
|
||||
useDashboardStore: (): any => ({
|
||||
jest.mock('providers/Dashboard/Dashboard', () => ({
|
||||
useDashboard: (): any => ({
|
||||
selectedDashboard: mockDashboard,
|
||||
setSelectedDashboard: mockSetSelectedDashboard,
|
||||
updateLocalStorageDashboardVariables: mockUpdateLocalStorageDashboardVariables,
|
||||
|
||||
@@ -56,8 +56,8 @@ const mockDashboard = {
|
||||
},
|
||||
};
|
||||
// Mock dependencies
|
||||
jest.mock('providers/Dashboard/store/useDashboardStore', () => ({
|
||||
useDashboardStore: (): any => ({
|
||||
jest.mock('providers/Dashboard/Dashboard', () => ({
|
||||
useDashboard: (): any => ({
|
||||
selectedDashboard: mockDashboard,
|
||||
}),
|
||||
}));
|
||||
@@ -152,8 +152,8 @@ describe('Panel Management Tests', () => {
|
||||
};
|
||||
|
||||
// Temporarily mock the dashboard
|
||||
jest.doMock('providers/Dashboard/store/useDashboardStore', () => ({
|
||||
useDashboardStore: (): any => ({
|
||||
jest.doMock('providers/Dashboard/Dashboard', () => ({
|
||||
useDashboard: (): any => ({
|
||||
selectedDashboard: modifiedDashboard,
|
||||
}),
|
||||
}));
|
||||
|
||||
@@ -4,7 +4,7 @@ import ROUTES from 'constants/routes';
|
||||
import { DASHBOARDS_LIST_QUERY_PARAMS_STORAGE_KEY } from 'hooks/dashboard/useDashboardsListQueryParams';
|
||||
import { useSafeNavigate } from 'hooks/useSafeNavigate';
|
||||
import { LayoutGrid } from 'lucide-react';
|
||||
import { useDashboardStore } from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { DashboardData } from 'types/api/dashboard/getAll';
|
||||
|
||||
import { Base64Icons } from '../../DashboardSettings/General/utils';
|
||||
@@ -13,7 +13,7 @@ import './DashboardBreadcrumbs.styles.scss';
|
||||
|
||||
function DashboardBreadcrumbs(): JSX.Element {
|
||||
const { safeNavigate } = useSafeNavigate();
|
||||
const { selectedDashboard } = useDashboardStore();
|
||||
const { selectedDashboard } = useDashboard();
|
||||
const updatedAtRef = useRef(selectedDashboard?.updatedAt);
|
||||
|
||||
const selectedData = selectedDashboard
|
||||
|
||||
@@ -6,10 +6,7 @@ import { useNotifications } from 'hooks/useNotifications';
|
||||
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
|
||||
import { usePlotContext } from 'lib/uPlotV2/context/PlotContext';
|
||||
import useLegendsSync from 'lib/uPlotV2/hooks/useLegendsSync';
|
||||
import {
|
||||
selectIsDashboardLocked,
|
||||
useDashboardStore,
|
||||
} from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
|
||||
import { getChartManagerColumns } from './getChartMangerColumns';
|
||||
import { ExtendedChartDataset, getDefaultTableDataSet } from './utils';
|
||||
@@ -53,7 +50,7 @@ export default function ChartManager({
|
||||
onToggleSeriesVisibility,
|
||||
syncSeriesVisibilityToLocalStorage,
|
||||
} = usePlotContext();
|
||||
const isDashboardLocked = useDashboardStore(selectIsDashboardLocked);
|
||||
const { isDashboardLocked } = useDashboard();
|
||||
|
||||
const [tableDataSet, setTableDataSet] = useState<ExtendedChartDataset[]>(() =>
|
||||
getDefaultTableDataSet(
|
||||
|
||||
@@ -32,18 +32,10 @@ jest.mock('lib/uPlotV2/hooks/useLegendsSync', () => ({
|
||||
}),
|
||||
}));
|
||||
|
||||
jest.mock('providers/Dashboard/store/useDashboardStore', () => ({
|
||||
useDashboardStore: (
|
||||
selector?: (s: {
|
||||
selectedDashboard: { locked: boolean } | undefined;
|
||||
}) => { selectedDashboard: { locked: boolean } },
|
||||
): { selectedDashboard: { locked: boolean } } => {
|
||||
const mockState = { selectedDashboard: { locked: false } };
|
||||
return selector ? selector(mockState) : mockState;
|
||||
},
|
||||
selectIsDashboardLocked: (s: {
|
||||
selectedDashboard: { locked: boolean } | undefined;
|
||||
}): boolean => s.selectedDashboard?.locked ?? false,
|
||||
jest.mock('providers/Dashboard/Dashboard', () => ({
|
||||
useDashboard: (): { isDashboardLocked: boolean } => ({
|
||||
isDashboardLocked: false,
|
||||
}),
|
||||
}));
|
||||
|
||||
jest.mock('hooks/useNotifications', () => ({
|
||||
|
||||
@@ -8,11 +8,8 @@ import { VariablesSettingsTab } from 'container/DashboardContainer/DashboardDesc
|
||||
import DashboardSettings from 'container/DashboardContainer/DashboardSettings';
|
||||
import useComponentPermission from 'hooks/useComponentPermission';
|
||||
import { useAppContext } from 'providers/App/App';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { usePanelTypeSelectionModalStore } from 'providers/Dashboard/helpers/panelTypeSelectionModalHelper';
|
||||
import {
|
||||
selectIsDashboardLocked,
|
||||
useDashboardStore,
|
||||
} from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { ROLES, USER_ROLES } from 'types/roles';
|
||||
import { ComponentTypes } from 'utils/permission';
|
||||
|
||||
@@ -23,8 +20,7 @@ export default function DashboardEmptyState(): JSX.Element {
|
||||
(s) => s.setIsPanelTypeSelectionModalOpen,
|
||||
);
|
||||
|
||||
const { selectedDashboard } = useDashboardStore();
|
||||
const isDashboardLocked = useDashboardStore(selectIsDashboardLocked);
|
||||
const { selectedDashboard, isDashboardLocked } = useDashboard();
|
||||
|
||||
const variablesSettingsTabHandle = useRef<VariablesSettingsTab>(null);
|
||||
const [isSettingsDrawerOpen, setIsSettingsDrawerOpen] = useState<boolean>(
|
||||
|
||||
@@ -3,10 +3,7 @@ import { Button, Input } from 'antd';
|
||||
import type { CheckboxChangeEvent } from 'antd/es/checkbox';
|
||||
import { ResizeTable } from 'components/ResizeTable';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import {
|
||||
selectIsDashboardLocked,
|
||||
useDashboardStore,
|
||||
} from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
|
||||
import { getGraphManagerTableColumns } from './TableRender/GraphManagerColumns';
|
||||
import { ExtendedChartDataset, GraphManagerProps } from './types';
|
||||
@@ -37,7 +34,7 @@ function GraphManager({
|
||||
}, [data, options]);
|
||||
|
||||
const { notifications } = useNotifications();
|
||||
const isDashboardLocked = useDashboardStore(selectIsDashboardLocked);
|
||||
const { isDashboardLocked } = useDashboard();
|
||||
|
||||
const checkBoxOnChangeHandler = useCallback(
|
||||
(e: CheckboxChangeEvent, index: number): void => {
|
||||
|
||||
@@ -39,10 +39,7 @@ import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariab
|
||||
import GetMinMax from 'lib/getMinMax';
|
||||
import { isEmpty } from 'lodash-es';
|
||||
import { useAppContext } from 'providers/App/App';
|
||||
import {
|
||||
selectIsDashboardLocked,
|
||||
useDashboardStore,
|
||||
} from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { Warning } from 'types/api';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
@@ -84,8 +81,11 @@ function FullView({
|
||||
setCurrentGraphRef(fullViewRef);
|
||||
}, [setCurrentGraphRef]);
|
||||
|
||||
const { selectedDashboard, setColumnWidths } = useDashboardStore();
|
||||
const isDashboardLocked = useDashboardStore(selectIsDashboardLocked);
|
||||
const {
|
||||
selectedDashboard,
|
||||
isDashboardLocked,
|
||||
setColumnWidths,
|
||||
} = useDashboard();
|
||||
|
||||
const onColumnWidthsChange = useCallback(
|
||||
(widths: Record<string, number>) => {
|
||||
|
||||
@@ -161,8 +161,8 @@ const mockProps: WidgetGraphComponentProps = {
|
||||
};
|
||||
|
||||
// Mock useDashboard hook
|
||||
jest.mock('providers/Dashboard/store/useDashboardStore', () => ({
|
||||
useDashboardStore: (): any => ({
|
||||
jest.mock('providers/Dashboard/Dashboard', () => ({
|
||||
useDashboard: (): any => ({
|
||||
selectedDashboard: {
|
||||
data: {
|
||||
variables: [],
|
||||
|
||||
@@ -28,7 +28,7 @@ import {
|
||||
getCustomTimeRangeWindowSweepInMS,
|
||||
getStartAndEndTimesInMilliseconds,
|
||||
} from 'pages/MessagingQueues/MessagingQueuesUtils';
|
||||
import { useDashboardStore } from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import { Props } from 'types/api/dashboard/update';
|
||||
import { EQueryType } from 'types/common/dashboard';
|
||||
@@ -106,7 +106,7 @@ function WidgetGraphComponent({
|
||||
selectedDashboard,
|
||||
setSelectedDashboard,
|
||||
setColumnWidths,
|
||||
} = useDashboardStore();
|
||||
} = useDashboard();
|
||||
|
||||
const onColumnWidthsChange = useCallback(
|
||||
(widths: Record<string, number>) => {
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { memo, useEffect, useMemo, useRef, useState } from 'react';
|
||||
// eslint-disable-next-line no-restricted-imports
|
||||
import { useDispatch, useSelector } from 'react-redux';
|
||||
import * as Sentry from '@sentry/react';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { DEFAULT_ENTITY_VERSION, ENTITY_VERSION_V5 } from 'constants/app';
|
||||
import { QueryParams } from 'constants/query';
|
||||
@@ -18,6 +17,7 @@ import { getVariableReferencesInQuery } from 'lib/dashboardVariables/variableRef
|
||||
import getTimeString from 'lib/getTimeString';
|
||||
import { isEqual } from 'lodash-es';
|
||||
import isEmpty from 'lodash-es/isEmpty';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { UpdateTimeInterval } from 'store/actions';
|
||||
import { AppState } from 'store/reducers';
|
||||
import APIError from 'types/api/error';
|
||||
@@ -68,19 +68,7 @@ function GridCardGraph({
|
||||
const [isInternalServerError, setIsInternalServerError] = useState<boolean>(
|
||||
false,
|
||||
);
|
||||
const queryRangeCalledRef = useRef(false);
|
||||
|
||||
useEffect(() => {
|
||||
const timeoutId = setTimeout(() => {
|
||||
if (!queryRangeCalledRef.current) {
|
||||
Sentry.captureEvent({
|
||||
message: `Dashboard query range not called within expected timeframe for widget ${widget?.id}`,
|
||||
level: 'warning',
|
||||
});
|
||||
}
|
||||
}, 120000);
|
||||
return (): void => clearTimeout(timeoutId);
|
||||
}, [widget?.id]);
|
||||
const { setDashboardQueryRangeCalled } = useDashboard();
|
||||
|
||||
const {
|
||||
minTime,
|
||||
@@ -272,14 +260,14 @@ function GridCardGraph({
|
||||
});
|
||||
}
|
||||
}
|
||||
queryRangeCalledRef.current = true;
|
||||
setDashboardQueryRangeCalled(true);
|
||||
},
|
||||
onSettled: (data) => {
|
||||
dataAvailable?.(
|
||||
isDataAvailableByPanelType(data?.payload?.data, widget?.panelTypes),
|
||||
);
|
||||
getGraphData?.(data?.payload?.data);
|
||||
queryRangeCalledRef.current = true;
|
||||
setDashboardQueryRangeCalled(true);
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
|
||||
import { FullScreen, FullScreenHandle } from 'react-full-screen';
|
||||
import { ItemCallback, Layout } from 'react-grid-layout';
|
||||
import { useIsFetching } from 'react-query';
|
||||
// eslint-disable-next-line no-restricted-imports
|
||||
import { useDispatch } from 'react-redux';
|
||||
import { useLocation } from 'react-router-dom';
|
||||
import * as Sentry from '@sentry/react';
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Button, Form, Input, Modal, Typography } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
@@ -12,7 +12,6 @@ import cx from 'classnames';
|
||||
import { ENTITY_VERSION_V5 } from 'constants/app';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import { PANEL_GROUP_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import { themeColors } from 'constants/theme';
|
||||
import { DEFAULT_ROW_NAME } from 'container/DashboardContainer/DashboardDescription/utils';
|
||||
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
|
||||
@@ -32,10 +31,7 @@ import {
|
||||
X,
|
||||
} from 'lucide-react';
|
||||
import { useAppContext } from 'providers/App/App';
|
||||
import {
|
||||
selectIsDashboardLocked,
|
||||
useDashboardStore,
|
||||
} from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { sortLayout } from 'providers/Dashboard/util';
|
||||
import { UpdateTimeInterval } from 'store/actions';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
@@ -65,9 +61,6 @@ interface GraphLayoutProps {
|
||||
function GraphLayout(props: GraphLayoutProps): JSX.Element {
|
||||
const { handle, enableDrillDown = false } = props;
|
||||
const { safeNavigate } = useSafeNavigate();
|
||||
const isDashboardFetching =
|
||||
useIsFetching([REACT_QUERY_KEY.DASHBOARD_BY_ID]) > 0;
|
||||
|
||||
const {
|
||||
selectedDashboard,
|
||||
layouts,
|
||||
@@ -75,9 +68,12 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
|
||||
panelMap,
|
||||
setPanelMap,
|
||||
setSelectedDashboard,
|
||||
isDashboardLocked,
|
||||
dashboardQueryRangeCalled,
|
||||
setDashboardQueryRangeCalled,
|
||||
isDashboardFetching,
|
||||
columnWidths,
|
||||
} = useDashboardStore();
|
||||
const isDashboardLocked = useDashboardStore(selectIsDashboardLocked);
|
||||
} = useDashboard();
|
||||
const { data } = selectedDashboard || {};
|
||||
const { pathname } = useLocation();
|
||||
const dispatch = useDispatch();
|
||||
@@ -141,6 +137,25 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
|
||||
setDashboardLayout(sortLayout(layouts));
|
||||
}, [layouts]);
|
||||
|
||||
useEffect(() => {
|
||||
setDashboardQueryRangeCalled(false);
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
const timeoutId = setTimeout(() => {
|
||||
// Send Sentry event if query_range is not called within expected timeframe (2 mins) when there are widgets
|
||||
if (!dashboardQueryRangeCalled && data?.widgets?.length) {
|
||||
Sentry.captureEvent({
|
||||
message: `Dashboard query range not called within expected timeframe even when there are ${data?.widgets?.length} widgets`,
|
||||
level: 'warning',
|
||||
});
|
||||
}
|
||||
}, 120000);
|
||||
|
||||
return (): void => clearTimeout(timeoutId);
|
||||
}, [dashboardQueryRangeCalled, data?.widgets?.length]);
|
||||
|
||||
const logEventCalledRef = useRef(false);
|
||||
useEffect(() => {
|
||||
if (!logEventCalledRef.current && !isUndefined(data)) {
|
||||
|
||||
@@ -4,12 +4,9 @@ import { Button, Popover } from 'antd';
|
||||
import useComponentPermission from 'hooks/useComponentPermission';
|
||||
import { EllipsisIcon, PenLine, Plus, X } from 'lucide-react';
|
||||
import { useAppContext } from 'providers/App/App';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { usePanelTypeSelectionModalStore } from 'providers/Dashboard/helpers/panelTypeSelectionModalHelper';
|
||||
import { setSelectedRowWidgetId } from 'providers/Dashboard/helpers/selectedRowWidgetIdHelper';
|
||||
import {
|
||||
selectIsDashboardLocked,
|
||||
useDashboardStore,
|
||||
} from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { ROLES, USER_ROLES } from 'types/roles';
|
||||
import { ComponentTypes } from 'utils/permission';
|
||||
|
||||
@@ -42,8 +39,7 @@ export function WidgetRowHeader(props: WidgetRowHeaderProps): JSX.Element {
|
||||
(s) => s.setIsPanelTypeSelectionModalOpen,
|
||||
);
|
||||
|
||||
const { selectedDashboard } = useDashboardStore();
|
||||
const isDashboardLocked = useDashboardStore(selectIsDashboardLocked);
|
||||
const { selectedDashboard, isDashboardLocked } = useDashboard();
|
||||
|
||||
const permissions: ComponentTypes[] = ['add_panel'];
|
||||
const { user } = useAppContext();
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { useCallback } from 'react';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { useDashboardStore } from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import {
|
||||
@@ -121,7 +121,7 @@ function useNavigateToExplorerPages(): (
|
||||
) => Promise<{
|
||||
[queryName: string]: { filters: TagFilterItem[]; dataSource?: string };
|
||||
}> {
|
||||
const { selectedDashboard } = useDashboardStore();
|
||||
const { selectedDashboard } = useDashboard();
|
||||
const { notifications } = useNotifications();
|
||||
|
||||
return useCallback(
|
||||
|
||||
@@ -92,8 +92,8 @@ jest.mock('hooks/useDarkMode', () => ({
|
||||
useIsDarkMode: (): boolean => false,
|
||||
}));
|
||||
|
||||
jest.mock('providers/Dashboard/store/useDashboardStore', () => ({
|
||||
useDashboardStore: (): { selectedDashboard: undefined } => ({
|
||||
jest.mock('providers/Dashboard/Dashboard', () => ({
|
||||
useDashboard: (): { selectedDashboard: undefined } => ({
|
||||
selectedDashboard: undefined,
|
||||
}),
|
||||
}));
|
||||
|
||||
@@ -6,24 +6,11 @@
|
||||
// - Handling multiple rows correctly
|
||||
// - Handling widgets with different heights
|
||||
|
||||
import { ReactNode } from 'react';
|
||||
import { I18nextProvider } from 'react-i18next';
|
||||
import { useSearchParams } from 'react-router-dom-v5-compat';
|
||||
import { screen } from '@testing-library/react';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { useDashboardBootstrap } from 'hooks/dashboard/useDashboardBootstrap';
|
||||
|
||||
function DashboardBootstrapWrapper({
|
||||
dashboardId,
|
||||
children,
|
||||
}: {
|
||||
dashboardId: string;
|
||||
children: ReactNode;
|
||||
}): JSX.Element {
|
||||
useDashboardBootstrap(dashboardId);
|
||||
// eslint-disable-next-line react/jsx-no-useless-fragment
|
||||
return <>{children}</>;
|
||||
}
|
||||
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
|
||||
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
|
||||
import i18n from 'ReactI18';
|
||||
import {
|
||||
@@ -322,7 +309,7 @@ describe('Stacking bar in new panel', () => {
|
||||
|
||||
const { container, getByText } = render(
|
||||
<I18nextProvider i18n={i18n}>
|
||||
<DashboardBootstrapWrapper dashboardId="">
|
||||
<DashboardProvider dashboardId="">
|
||||
<PreferenceContextProvider>
|
||||
<NewWidget
|
||||
dashboardId=""
|
||||
@@ -330,7 +317,7 @@ describe('Stacking bar in new panel', () => {
|
||||
selectedGraph={PANEL_TYPES.BAR}
|
||||
/>
|
||||
</PreferenceContextProvider>
|
||||
</DashboardBootstrapWrapper>
|
||||
</DashboardProvider>
|
||||
</I18nextProvider>,
|
||||
);
|
||||
|
||||
@@ -375,13 +362,13 @@ describe('when switching to BAR panel type', () => {
|
||||
});
|
||||
|
||||
const { getByTestId, getByText, container } = render(
|
||||
<DashboardBootstrapWrapper dashboardId="">
|
||||
<DashboardProvider dashboardId="">
|
||||
<NewWidget
|
||||
dashboardId=""
|
||||
selectedDashboard={undefined}
|
||||
selectedGraph={PANEL_TYPES.BAR}
|
||||
/>
|
||||
</DashboardBootstrapWrapper>,
|
||||
</DashboardProvider>,
|
||||
);
|
||||
|
||||
expect(getByTestId('panel-change-select')).toHaveAttribute(
|
||||
|
||||
@@ -2,15 +2,16 @@ import { Dispatch, SetStateAction } from 'react';
|
||||
import { UseQueryResult } from 'react-query';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
|
||||
import { IDashboardContext } from 'providers/Dashboard/types';
|
||||
import { SuccessResponse, Warning } from 'types/api';
|
||||
import { Dashboard, Widgets } from 'types/api/dashboard/getAll';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
|
||||
import { timePreferance } from './RightContainer/timeItems';
|
||||
|
||||
export interface NewWidgetProps {
|
||||
dashboardId: string;
|
||||
selectedDashboard: Dashboard | undefined;
|
||||
selectedDashboard: IDashboardContext['selectedDashboard'];
|
||||
selectedGraph: PANEL_TYPES;
|
||||
enableDrillDown?: boolean;
|
||||
}
|
||||
@@ -34,7 +35,7 @@ export interface WidgetGraphProps {
|
||||
>
|
||||
>;
|
||||
enableDrillDown?: boolean;
|
||||
selectedDashboard: Dashboard | undefined;
|
||||
selectedDashboard: IDashboardContext['selectedDashboard'];
|
||||
isNewPanel?: boolean;
|
||||
}
|
||||
|
||||
|
||||
@@ -6122,95 +6122,5 @@
|
||||
],
|
||||
"id": "huggingface-observability",
|
||||
"link": "/docs/huggingface-observability/"
|
||||
},
|
||||
{
|
||||
"dataSource": "mistral-observability",
|
||||
"label": "Mistral AI",
|
||||
"imgUrl": "/Logos/mistral.svg",
|
||||
"tags": [
|
||||
"LLM Monitoring"
|
||||
],
|
||||
"module": "apm",
|
||||
"relatedSearchKeywords": [
|
||||
"llm",
|
||||
"llm monitoring",
|
||||
"mistral",
|
||||
"mistral ai",
|
||||
"monitoring",
|
||||
"observability",
|
||||
"otel mistral",
|
||||
"traces",
|
||||
"tracing"
|
||||
],
|
||||
"id": "mistral-observability",
|
||||
"link": "/docs/mistral-observability/"
|
||||
},
|
||||
{
|
||||
"dataSource": "openclaw-observability",
|
||||
"label": "OpenClaw",
|
||||
"imgUrl": "/Logos/openclaw.svg",
|
||||
"tags": [
|
||||
"LLM Monitoring"
|
||||
],
|
||||
"module": "apm",
|
||||
"relatedSearchKeywords": [
|
||||
"llm",
|
||||
"llm monitoring",
|
||||
"monitoring",
|
||||
"observability",
|
||||
"openclaw",
|
||||
"otel openclaw",
|
||||
"traces",
|
||||
"tracing"
|
||||
],
|
||||
"id": "openclaw-observability",
|
||||
"link": "/docs/openclaw-monitoring/"
|
||||
},
|
||||
{
|
||||
"dataSource": "claude-agent-monitoring",
|
||||
"label": "Claude Agent SDK",
|
||||
"imgUrl": "/Logos/claude-code.svg",
|
||||
"tags": [
|
||||
"LLM Monitoring"
|
||||
],
|
||||
"module": "apm",
|
||||
"relatedSearchKeywords": [
|
||||
"anthropic",
|
||||
"claude",
|
||||
"claude agent",
|
||||
"claude agent sdk",
|
||||
"claude sdk",
|
||||
"llm",
|
||||
"llm monitoring",
|
||||
"monitoring",
|
||||
"observability",
|
||||
"otel claude",
|
||||
"traces",
|
||||
"tracing"
|
||||
],
|
||||
"id": "claude-agent-monitoring",
|
||||
"link": "/docs/claude-agent-monitoring/"
|
||||
},
|
||||
{
|
||||
"dataSource": "render-metrics",
|
||||
"label": "Render",
|
||||
"imgUrl": "/Logos/render.svg",
|
||||
"tags": [
|
||||
"infrastructure monitoring",
|
||||
"metrics"
|
||||
],
|
||||
"module": "metrics",
|
||||
"relatedSearchKeywords": [
|
||||
"infrastructure",
|
||||
"metrics",
|
||||
"monitoring",
|
||||
"observability",
|
||||
"paas",
|
||||
"render",
|
||||
"render metrics",
|
||||
"render monitoring"
|
||||
],
|
||||
"id": "render-metrics",
|
||||
"link": "/docs/metrics-management/render-metrics/"
|
||||
}
|
||||
]
|
||||
|
||||
@@ -11,7 +11,7 @@ import useContextVariables from 'hooks/dashboard/useContextVariables';
|
||||
import { useSafeNavigate } from 'hooks/useSafeNavigate';
|
||||
import createQueryParams from 'lib/createQueryParams';
|
||||
import ContextMenu from 'periscope/components/ContextMenu';
|
||||
import { useDashboardStore } from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { ContextLinksData } from 'types/api/dashboard/getAll';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
@@ -66,7 +66,7 @@ const useBaseAggregateOptions = ({
|
||||
getUpdatedQuery,
|
||||
isLoading: isResolveQueryLoading,
|
||||
} = useUpdatedQuery();
|
||||
const { selectedDashboard } = useDashboardStore();
|
||||
const { selectedDashboard } = useDashboard();
|
||||
|
||||
useEffect(() => {
|
||||
if (!aggregateData) {
|
||||
|
||||
@@ -12,8 +12,8 @@ jest.mock('react-router-dom', () => ({
|
||||
}));
|
||||
|
||||
// Mock useDashboard hook
|
||||
jest.mock('providers/Dashboard/store/useDashboardStore', () => ({
|
||||
useDashboardStore: (): any => ({
|
||||
jest.mock('providers/Dashboard/Dashboard', () => ({
|
||||
useDashboard: (): any => ({
|
||||
selectedDashboard: {
|
||||
data: {
|
||||
variables: [],
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { renderHook } from '@testing-library/react';
|
||||
import { getLocalStorageDashboardVariables } from 'hooks/dashboard/useDashboardFromLocalStorage';
|
||||
import { useDashboardVariablesFromLocalStorage } from 'hooks/dashboard/useDashboardFromLocalStorage';
|
||||
import { useTransformDashboardVariables } from 'hooks/dashboard/useTransformDashboardVariables';
|
||||
import useVariablesFromUrl from 'hooks/dashboard/useVariablesFromUrl';
|
||||
import { Dashboard, IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
@@ -7,8 +7,8 @@ import { Dashboard, IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
jest.mock('hooks/dashboard/useDashboardFromLocalStorage');
|
||||
jest.mock('hooks/dashboard/useVariablesFromUrl');
|
||||
|
||||
const mockGetLocalStorageDashboardVariables = getLocalStorageDashboardVariables as jest.MockedFunction<
|
||||
typeof getLocalStorageDashboardVariables
|
||||
const mockUseDashboardVariablesFromLocalStorage = useDashboardVariablesFromLocalStorage as jest.MockedFunction<
|
||||
typeof useDashboardVariablesFromLocalStorage
|
||||
>;
|
||||
const mockUseVariablesFromUrl = useVariablesFromUrl as jest.MockedFunction<
|
||||
typeof useVariablesFromUrl
|
||||
@@ -46,7 +46,10 @@ const setupHook = (
|
||||
currentDashboard: Record<string, any> = {},
|
||||
urlVariables: Record<string, any> = {},
|
||||
): ReturnType<typeof useTransformDashboardVariables> => {
|
||||
mockGetLocalStorageDashboardVariables.mockReturnValue(currentDashboard as any);
|
||||
mockUseDashboardVariablesFromLocalStorage.mockReturnValue({
|
||||
currentDashboard,
|
||||
updateLocalStorageDashboardVariables: jest.fn(),
|
||||
});
|
||||
mockUseVariablesFromUrl.mockReturnValue({
|
||||
getUrlVariables: () => urlVariables,
|
||||
setUrlVariables: jest.fn(),
|
||||
|
||||
@@ -1,164 +0,0 @@
|
||||
import { useEffect, useRef } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
// eslint-disable-next-line no-restricted-imports
|
||||
import { useDispatch, useSelector } from 'react-redux';
|
||||
import { Modal } from 'antd';
|
||||
import dayjs from 'dayjs';
|
||||
import { useTransformDashboardVariables } from 'hooks/dashboard/useTransformDashboardVariables';
|
||||
import useTabVisibility from 'hooks/useTabFocus';
|
||||
import { getUpdatedLayout } from 'lib/dashboard/getUpdatedLayout';
|
||||
import { getMinMaxForSelectedTime } from 'lib/getMinMax';
|
||||
import { defaultTo } from 'lodash-es';
|
||||
import { initializeDefaultVariables } from 'providers/Dashboard/initializeDefaultVariables';
|
||||
import { useDashboardStore } from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { sortLayout } from 'providers/Dashboard/util';
|
||||
// eslint-disable-next-line no-restricted-imports
|
||||
import { Dispatch } from 'redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import AppActions from 'types/actions';
|
||||
import { UPDATE_TIME_INTERVAL } from 'types/actions/globalTime';
|
||||
import { Dashboard } from 'types/api/dashboard/getAll';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
import { useDashboardQuery } from './useDashboardQuery';
|
||||
import { useDashboardVariablesSync } from './useDashboardVariablesSync';
|
||||
|
||||
interface UseDashboardBootstrapOptions {
|
||||
/** Pass `onModal.confirm` from `Modal.useModal()` to get theme-aware modals. Falls back to static `Modal.confirm`. */
|
||||
confirm?: typeof Modal.confirm;
|
||||
}
|
||||
|
||||
export interface UseDashboardBootstrapReturn {
|
||||
isLoading: boolean;
|
||||
isError: boolean;
|
||||
isFetching: boolean;
|
||||
error: unknown;
|
||||
}
|
||||
|
||||
export function useDashboardBootstrap(
|
||||
dashboardId: string,
|
||||
options: UseDashboardBootstrapOptions = {},
|
||||
): UseDashboardBootstrapReturn {
|
||||
const confirm = options.confirm ?? Modal.confirm;
|
||||
|
||||
const { t } = useTranslation(['dashboard']);
|
||||
const dispatch = useDispatch<Dispatch<AppActions>>();
|
||||
const globalTime = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
|
||||
const {
|
||||
setSelectedDashboard,
|
||||
setLayouts,
|
||||
setPanelMap,
|
||||
resetDashboardStore,
|
||||
} = useDashboardStore();
|
||||
|
||||
const dashboardRef = useRef<Dashboard>();
|
||||
const modalRef = useRef<ReturnType<typeof Modal.confirm>>();
|
||||
|
||||
const isVisible = useTabVisibility();
|
||||
|
||||
const {
|
||||
getUrlVariables,
|
||||
updateUrlVariable,
|
||||
transformDashboardVariables,
|
||||
} = useTransformDashboardVariables(dashboardId);
|
||||
|
||||
// Keep the external variables store in sync with selectedDashboard
|
||||
useDashboardVariablesSync(dashboardId);
|
||||
|
||||
const dashboardQuery = useDashboardQuery(dashboardId);
|
||||
|
||||
// Handle new dashboard data: initialize on first load, detect changes on subsequent fetches.
|
||||
// React Query's structural sharing means this effect only fires when data actually changes.
|
||||
useEffect(() => {
|
||||
if (!dashboardQuery.data?.data) {
|
||||
return;
|
||||
}
|
||||
|
||||
const updatedDashboardData = transformDashboardVariables(
|
||||
dashboardQuery.data.data,
|
||||
);
|
||||
const updatedDate = dayjs(updatedDashboardData?.updatedAt);
|
||||
|
||||
// First load: initialize store and URL variables, then return
|
||||
if (!dashboardRef.current) {
|
||||
const variables = updatedDashboardData?.data?.variables;
|
||||
if (variables) {
|
||||
initializeDefaultVariables(variables, getUrlVariables, updateUrlVariable);
|
||||
}
|
||||
setSelectedDashboard(updatedDashboardData);
|
||||
dashboardRef.current = updatedDashboardData;
|
||||
setLayouts(sortLayout(getUpdatedLayout(updatedDashboardData?.data.layout)));
|
||||
setPanelMap(defaultTo(updatedDashboardData?.data?.panelMap, {}));
|
||||
return;
|
||||
}
|
||||
|
||||
// Subsequent fetches: skip if updatedAt hasn't advanced
|
||||
if (!updatedDate.isAfter(dayjs(dashboardRef.current.updatedAt))) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Data has changed: prompt user if tab is visible
|
||||
if (isVisible && dashboardRef.current.id === updatedDashboardData?.id) {
|
||||
const modal = confirm({
|
||||
centered: true,
|
||||
title: t('dashboard_has_been_updated'),
|
||||
content: t('do_you_want_to_refresh_the_dashboard'),
|
||||
onOk() {
|
||||
setSelectedDashboard(updatedDashboardData);
|
||||
|
||||
const { maxTime, minTime } = getMinMaxForSelectedTime(
|
||||
globalTime.selectedTime,
|
||||
globalTime.minTime,
|
||||
globalTime.maxTime,
|
||||
);
|
||||
dispatch({
|
||||
type: UPDATE_TIME_INTERVAL,
|
||||
payload: { maxTime, minTime, selectedTime: globalTime.selectedTime },
|
||||
});
|
||||
|
||||
dashboardRef.current = updatedDashboardData;
|
||||
setLayouts(
|
||||
sortLayout(getUpdatedLayout(updatedDashboardData?.data.layout)),
|
||||
);
|
||||
setPanelMap(defaultTo(updatedDashboardData?.data.panelMap, {}));
|
||||
},
|
||||
});
|
||||
modalRef.current = modal;
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [dashboardQuery.data]);
|
||||
|
||||
// Refetch when tab becomes visible (after initial load)
|
||||
useEffect(() => {
|
||||
if (isVisible && dashboardRef.current && !!dashboardId) {
|
||||
dashboardQuery.refetch();
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [isVisible]);
|
||||
|
||||
// Dismiss stale modal when tab is hidden
|
||||
useEffect(() => {
|
||||
if (!isVisible && modalRef.current) {
|
||||
modalRef.current.destroy();
|
||||
}
|
||||
}, [isVisible]);
|
||||
|
||||
// Reset store on unmount so stale state doesn't bleed across dashboards
|
||||
useEffect(
|
||||
() => (): void => {
|
||||
resetDashboardStore();
|
||||
},
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[],
|
||||
);
|
||||
|
||||
return {
|
||||
isLoading: dashboardQuery.isLoading,
|
||||
isError: dashboardQuery.isError,
|
||||
isFetching: dashboardQuery.isFetching,
|
||||
error: dashboardQuery.error,
|
||||
};
|
||||
}
|
||||
@@ -1,68 +0,0 @@
|
||||
import getLocalStorageKey from 'api/browser/localstorage/get';
|
||||
import setLocalStorageKey from 'api/browser/localstorage/set';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
|
||||
export interface LocalStoreDashboardVariables {
|
||||
[id: string]: {
|
||||
selectedValue: IDashboardVariable['selectedValue'];
|
||||
allSelected: boolean;
|
||||
};
|
||||
}
|
||||
|
||||
interface DashboardLocalStorageVariables {
|
||||
[id: string]: LocalStoreDashboardVariables;
|
||||
}
|
||||
|
||||
function readAll(): DashboardLocalStorageVariables {
|
||||
const raw = getLocalStorageKey(LOCALSTORAGE.DASHBOARD_VARIABLES);
|
||||
if (!raw) {
|
||||
return {};
|
||||
}
|
||||
try {
|
||||
return JSON.parse(raw);
|
||||
} catch {
|
||||
console.error('Failed to parse dashboard variables from local storage');
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
function writeAll(data: DashboardLocalStorageVariables): void {
|
||||
try {
|
||||
setLocalStorageKey(LOCALSTORAGE.DASHBOARD_VARIABLES, JSON.stringify(data));
|
||||
} catch {
|
||||
console.error('Failed to set dashboard variables in local storage');
|
||||
}
|
||||
}
|
||||
|
||||
/** Read the saved variable selections for a dashboard from localStorage. */
|
||||
export function getLocalStorageDashboardVariables(
|
||||
dashboardId: string,
|
||||
): LocalStoreDashboardVariables {
|
||||
return readAll()[dashboardId] ?? {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Write one variable's selection for a dashboard to localStorage.
|
||||
* All call sites write to the same store with no React state coordination.
|
||||
*/
|
||||
export function updateLocalStorageDashboardVariable(
|
||||
dashboardId: string,
|
||||
id: string,
|
||||
selectedValue: IDashboardVariable['selectedValue'],
|
||||
allSelected: boolean,
|
||||
isDynamic?: boolean,
|
||||
): void {
|
||||
const all = readAll();
|
||||
all[dashboardId] = {
|
||||
...(all[dashboardId] ?? {}),
|
||||
[id]:
|
||||
isDynamic && allSelected
|
||||
? {
|
||||
selectedValue: (undefined as unknown) as IDashboardVariable['selectedValue'],
|
||||
allSelected: true,
|
||||
}
|
||||
: { selectedValue, allSelected },
|
||||
};
|
||||
writeAll(all);
|
||||
}
|
||||
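For context on the two helpers removed above, a minimal usage sketch; the dashboard id 'dash-123' and variable id 'env' are made-up values, and the calls rely only on the signatures shown in this file:

// Persist one selection, then read the saved map back (illustrative values)
updateLocalStorageDashboardVariable('dash-123', 'env', 'production', false);
const saved = getLocalStorageDashboardVariables('dash-123');
// saved.env => { selectedValue: 'production', allSelected: false }

// Dynamic variables with "ALL" selected are stored without a concrete value
updateLocalStorageDashboardVariable('dash-123', 'env', 'production', true, true);
// next read => { selectedValue: undefined, allSelected: true }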
frontend/src/hooks/dashboard/useDashboardFromLocalStorage.tsx (new file, 110 lines)
@@ -0,0 +1,110 @@
|
||||
import { useEffect, useState } from 'react';
|
||||
import getLocalStorageKey from 'api/browser/localstorage/get';
|
||||
import setLocalStorageKey from 'api/browser/localstorage/set';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import { defaultTo } from 'lodash-es';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
|
||||
interface LocalStoreDashboardVariables {
|
||||
[id: string]: {
|
||||
selectedValue: IDashboardVariable['selectedValue'];
|
||||
allSelected: boolean;
|
||||
};
|
||||
}
|
||||
interface DashboardLocalStorageVariables {
|
||||
[id: string]: LocalStoreDashboardVariables;
|
||||
}
|
||||
|
||||
export interface UseDashboardVariablesFromLocalStorageReturn {
|
||||
currentDashboard: LocalStoreDashboardVariables;
|
||||
updateLocalStorageDashboardVariables: (
|
||||
id: string,
|
||||
selectedValue: IDashboardVariable['selectedValue'],
|
||||
allSelected: boolean,
|
||||
isDynamic?: boolean,
|
||||
) => void;
|
||||
}
|
||||
|
||||
export const useDashboardVariablesFromLocalStorage = (
|
||||
dashboardId: string,
|
||||
): UseDashboardVariablesFromLocalStorageReturn => {
|
||||
const [
|
||||
allDashboards,
|
||||
setAllDashboards,
|
||||
] = useState<DashboardLocalStorageVariables>({});
|
||||
|
||||
const [
|
||||
currentDashboard,
|
||||
setCurrentDashboard,
|
||||
] = useState<LocalStoreDashboardVariables>({});
|
||||
|
||||
useEffect(() => {
|
||||
const localStoreDashboardVariablesString = getLocalStorageKey(
|
||||
LOCALSTORAGE.DASHBOARD_VARIABLES,
|
||||
);
|
||||
let localStoreDashboardVariables: DashboardLocalStorageVariables = {};
|
||||
if (localStoreDashboardVariablesString === null) {
|
||||
try {
|
||||
const serializedData = JSON.stringify({
|
||||
[dashboardId]: {},
|
||||
});
|
||||
|
||||
setLocalStorageKey(LOCALSTORAGE.DASHBOARD_VARIABLES, serializedData);
|
||||
} catch {
|
||||
console.error('Failed to serialise the data');
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
localStoreDashboardVariables = JSON.parse(
|
||||
localStoreDashboardVariablesString,
|
||||
);
|
||||
} catch {
|
||||
console.error('Failed to parse dashboards from local storage');
|
||||
localStoreDashboardVariables = {};
|
||||
} finally {
|
||||
setAllDashboards(localStoreDashboardVariables);
|
||||
}
|
||||
}
|
||||
setCurrentDashboard(defaultTo(localStoreDashboardVariables[dashboardId], {}));
|
||||
}, [dashboardId]);
|
||||
|
||||
useEffect(() => {
|
||||
try {
|
||||
const serializedData = JSON.stringify(allDashboards);
|
||||
setLocalStorageKey(LOCALSTORAGE.DASHBOARD_VARIABLES, serializedData);
|
||||
} catch {
|
||||
console.error('Failed to set dashboards in local storage');
|
||||
}
|
||||
}, [allDashboards]);
|
||||
|
||||
useEffect(() => {
|
||||
setAllDashboards((prev) => ({
|
||||
...prev,
|
||||
[dashboardId]: { ...currentDashboard },
|
||||
}));
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [currentDashboard]);
|
||||
|
||||
const updateLocalStorageDashboardVariables = (
|
||||
id: string,
|
||||
selectedValue: IDashboardVariable['selectedValue'],
|
||||
allSelected: boolean,
|
||||
isDynamic?: boolean,
|
||||
): void => {
|
||||
setCurrentDashboard((prev) => ({
|
||||
...prev,
|
||||
[id]:
|
||||
isDynamic && allSelected
|
||||
? {
|
||||
selectedValue: (undefined as unknown) as IDashboardVariable['selectedValue'],
|
||||
allSelected: true,
|
||||
}
|
||||
: { selectedValue, allSelected },
|
||||
}));
|
||||
};
|
||||
|
||||
return {
|
||||
currentDashboard,
|
||||
updateLocalStorageDashboardVariables,
|
||||
};
|
||||
};
|
||||
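A sketch of how a component might consume the hook added above; the variable id 'env' and value 'staging' are illustrative, and the return shape follows UseDashboardVariablesFromLocalStorageReturn:

const {
  currentDashboard,
  updateLocalStorageDashboardVariables,
} = useDashboardVariablesFromLocalStorage(dashboardId);

// Restore a previously saved selection, if any
const savedEnv = currentDashboard.env?.selectedValue;

// Persist a new selection; the hook syncs the full map back to localStorage
updateLocalStorageDashboardVariables('env', 'staging', false);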
@@ -1,49 +0,0 @@
|
||||
import { useQuery, UseQueryResult } from 'react-query';
|
||||
// eslint-disable-next-line no-restricted-imports
|
||||
import { useSelector } from 'react-redux';
|
||||
import getDashboard from 'api/v1/dashboards/id/get';
|
||||
import {
|
||||
DASHBOARD_CACHE_TIME,
|
||||
DASHBOARD_CACHE_TIME_ON_REFRESH_ENABLED,
|
||||
} from 'constants/queryCacheTime';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import { useAppContext } from 'providers/App/App';
|
||||
import { useErrorModal } from 'providers/ErrorModalProvider';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { SuccessResponseV2 } from 'types/api';
|
||||
import { Dashboard } from 'types/api/dashboard/getAll';
|
||||
import APIError from 'types/api/error';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
/**
|
||||
* Fetches a dashboard by ID. Handles auth gating, cache time based on
|
||||
* auto-refresh setting, and surfaces API errors via the error modal.
|
||||
*/
|
||||
export function useDashboardQuery(
|
||||
dashboardId: string,
|
||||
): UseQueryResult<SuccessResponseV2<Dashboard>> {
|
||||
const { isLoggedIn } = useAppContext();
|
||||
const { showErrorModal } = useErrorModal();
|
||||
const globalTime = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
|
||||
return useQuery(
|
||||
[
|
||||
REACT_QUERY_KEY.DASHBOARD_BY_ID,
|
||||
dashboardId,
|
||||
globalTime.isAutoRefreshDisabled,
|
||||
],
|
||||
{
|
||||
enabled: !!dashboardId && isLoggedIn,
|
||||
queryFn: () => getDashboard({ id: dashboardId }),
|
||||
refetchOnWindowFocus: false,
|
||||
cacheTime: globalTime.isAutoRefreshDisabled
|
||||
? DASHBOARD_CACHE_TIME
|
||||
: DASHBOARD_CACHE_TIME_ON_REFRESH_ENABLED,
|
||||
onError: (error) => {
|
||||
showErrorModal(error as APIError);
|
||||
},
|
||||
},
|
||||
);
|
||||
}
|
||||
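As a reference for the removed useDashboardQuery hook, a short consumption sketch based only on the signature above; the spinner handling is illustrative:

const dashboardQuery = useDashboardQuery(dashboardId);

if (dashboardQuery.isLoading) {
  // render a spinner while the dashboard is being fetched
}

// SuccessResponseV2<Dashboard> wraps the dashboard payload under `data`
const dashboard = dashboardQuery.data?.data;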
@@ -1,33 +0,0 @@
|
||||
import { useEffect } from 'react';
|
||||
import isEqual from 'lodash-es/isEqual';
|
||||
import {
|
||||
setDashboardVariablesStore,
|
||||
updateDashboardVariablesStore,
|
||||
} from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStore';
|
||||
import {
|
||||
DashboardStore,
|
||||
useDashboardStore,
|
||||
} from 'providers/Dashboard/store/useDashboardStore';
|
||||
|
||||
import { useDashboardVariablesSelector } from './useDashboardVariables';
|
||||
|
||||
/**
|
||||
* Keeps the external variables store in sync with the zustand dashboard store.
|
||||
* When selectedDashboard changes, propagates variable updates to the variables store.
|
||||
*/
|
||||
export function useDashboardVariablesSync(dashboardId: string): void {
|
||||
const dashboardVariables = useDashboardVariablesSelector((s) => s.variables);
|
||||
const savedDashboardId = useDashboardVariablesSelector((s) => s.dashboardId);
|
||||
const selectedDashboard = useDashboardStore(
|
||||
(s: DashboardStore) => s.selectedDashboard,
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
const updatedVariables = selectedDashboard?.data.variables || {};
|
||||
if (savedDashboardId !== dashboardId) {
|
||||
setDashboardVariablesStore({ dashboardId, variables: updatedVariables });
|
||||
} else if (!isEqual(dashboardVariables, updatedVariables)) {
|
||||
updateDashboardVariablesStore({ dashboardId, variables: updatedVariables });
|
||||
}
|
||||
}, [selectedDashboard]); // eslint-disable-line react-hooks/exhaustive-deps
|
||||
}
|
||||
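The sync hook above is side-effect only; a sketch of its intended call site (the wrapping component is hypothetical):

function DashboardRoot({ dashboardId }: { dashboardId: string }): JSX.Element {
  // Pushes variable changes from the zustand dashboard store into the
  // external variables store; the hook returns nothing.
  useDashboardVariablesSync(dashboardId);
  return <DashboardContainer />;
}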
@@ -1,42 +0,0 @@
|
||||
import { useMutation } from 'react-query';
|
||||
import locked from 'api/v1/dashboards/id/lock';
|
||||
import {
|
||||
getSelectedDashboard,
|
||||
useDashboardStore,
|
||||
} from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { useErrorModal } from 'providers/ErrorModalProvider';
|
||||
import APIError from 'types/api/error';
|
||||
|
||||
/**
|
||||
* Hook for toggling dashboard locked state.
|
||||
* Calls the lock API and syncs the result into the Zustand store.
|
||||
*/
|
||||
export function useLockDashboard(): (value: boolean) => Promise<void> {
|
||||
const { showErrorModal } = useErrorModal();
|
||||
const { setSelectedDashboard } = useDashboardStore();
|
||||
|
||||
const { mutate: lockDashboard } = useMutation(locked, {
|
||||
onSuccess: (_, props) => {
|
||||
setSelectedDashboard((prev) =>
|
||||
prev ? { ...prev, locked: props.lock } : prev,
|
||||
);
|
||||
},
|
||||
onError: (error) => {
|
||||
showErrorModal(error as APIError);
|
||||
},
|
||||
});
|
||||
|
||||
return async (value: boolean): Promise<void> => {
|
||||
const selectedDashboard = getSelectedDashboard();
|
||||
if (selectedDashboard) {
|
||||
try {
|
||||
await lockDashboard({
|
||||
id: selectedDashboard.id,
|
||||
lock: value,
|
||||
});
|
||||
} catch (error) {
|
||||
showErrorModal(error as APIError);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
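A sketch of wiring the removed useLockDashboard hook to a settings action; the handler name is illustrative:

const lockDashboard = useLockDashboard();

// Toggle the currently selected dashboard's locked state
const handleLockToggle = async (lock: boolean): Promise<void> => {
  await lockDashboard(lock);
};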
@@ -1,5 +1,8 @@
|
||||
import { ALL_SELECTED_VALUE } from 'components/NewSelect/utils';
|
||||
import { getLocalStorageDashboardVariables } from 'hooks/dashboard/useDashboardFromLocalStorage';
|
||||
import {
|
||||
useDashboardVariablesFromLocalStorage,
|
||||
UseDashboardVariablesFromLocalStorageReturn,
|
||||
} from 'hooks/dashboard/useDashboardFromLocalStorage';
|
||||
import useVariablesFromUrl, {
|
||||
UseVariablesFromUrlReturn,
|
||||
} from 'hooks/dashboard/useVariablesFromUrl';
|
||||
@@ -10,10 +13,14 @@ import { v4 as generateUUID } from 'uuid';
|
||||
|
||||
export function useTransformDashboardVariables(
|
||||
dashboardId: string,
|
||||
): Pick<UseVariablesFromUrlReturn, 'getUrlVariables' | 'updateUrlVariable'> & {
|
||||
transformDashboardVariables: (data: Dashboard) => Dashboard;
|
||||
currentDashboard: ReturnType<typeof getLocalStorageDashboardVariables>;
|
||||
} {
|
||||
): Pick<UseVariablesFromUrlReturn, 'getUrlVariables' | 'updateUrlVariable'> &
|
||||
UseDashboardVariablesFromLocalStorageReturn & {
|
||||
transformDashboardVariables: (data: Dashboard) => Dashboard;
|
||||
} {
|
||||
const {
|
||||
currentDashboard,
|
||||
updateLocalStorageDashboardVariables,
|
||||
} = useDashboardVariablesFromLocalStorage(dashboardId);
|
||||
const { getUrlVariables, updateUrlVariable } = useVariablesFromUrl();
|
||||
|
||||
const mergeDBWithLocalStorage = (
|
||||
@@ -73,7 +80,7 @@ export function useTransformDashboardVariables(
|
||||
if (data && data.data && data.data.variables) {
|
||||
const clonedDashboardData = mergeDBWithLocalStorage(
|
||||
JSON.parse(JSON.stringify(data)),
|
||||
getLocalStorageDashboardVariables(dashboardId),
|
||||
currentDashboard,
|
||||
);
|
||||
const { variables } = clonedDashboardData.data;
|
||||
const existingOrders: Set<number> = new Set();
|
||||
@@ -115,6 +122,7 @@ export function useTransformDashboardVariables(
|
||||
transformDashboardVariables,
|
||||
getUrlVariables,
|
||||
updateUrlVariable,
|
||||
currentDashboard: getLocalStorageDashboardVariables(dashboardId),
|
||||
currentDashboard,
|
||||
updateLocalStorageDashboardVariables,
|
||||
};
|
||||
}
|
||||
|
||||
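After this change the hook exposes the localStorage-backed state directly instead of re-reading it; a sketch of the widened return shape (the `dashboard` argument stands in for a fetched Dashboard object):

const {
  transformDashboardVariables,
  getUrlVariables,
  updateUrlVariable,
  currentDashboard,
  updateLocalStorageDashboardVariables,
} = useTransformDashboardVariables(dashboardId);

// Merge DB-defined variables with locally persisted selections
const dashboardWithVariables = transformDashboardVariables(dashboard);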
@@ -1,5 +1,7 @@
|
||||
import { useMutation, UseMutationResult } from 'react-query';
|
||||
import update from 'api/v1/dashboards/id/update';
|
||||
import dayjs from 'dayjs';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { useErrorModal } from 'providers/ErrorModalProvider';
|
||||
import { SuccessResponseV2 } from 'types/api';
|
||||
import { Dashboard } from 'types/api/dashboard/getAll';
|
||||
@@ -7,8 +9,14 @@ import { Props } from 'types/api/dashboard/update';
|
||||
import APIError from 'types/api/error';
|
||||
|
||||
export const useUpdateDashboard = (): UseUpdateDashboard => {
|
||||
const { updatedTimeRef } = useDashboard();
|
||||
const { showErrorModal } = useErrorModal();
|
||||
return useMutation(update, {
|
||||
onSuccess: (data) => {
|
||||
if (data.data) {
|
||||
updatedTimeRef.current = dayjs(data.data.updatedAt);
|
||||
}
|
||||
},
|
||||
onError: (error) => {
|
||||
showErrorModal(error);
|
||||
},
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { useMemo } from 'react';
|
||||
import { PANEL_GROUP_TYPES } from 'constants/queryBuilder';
|
||||
import { createDynamicVariableToWidgetsMap } from 'hooks/dashboard/utils';
|
||||
import { useDashboardStore } from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
|
||||
import { useDashboardVariablesByType } from './useDashboardVariablesByType';
|
||||
@@ -12,7 +12,7 @@ import { useDashboardVariablesByType } from './useDashboardVariablesByType';
|
||||
*/
|
||||
export function useWidgetsByDynamicVariableId(): Record<string, string[]> {
|
||||
const dynamicVariables = useDashboardVariablesByType('DYNAMIC', 'values');
|
||||
const { selectedDashboard } = useDashboardStore();
|
||||
const { selectedDashboard } = useDashboard();
|
||||
|
||||
return useMemo(() => {
|
||||
const widgets =
|
||||
|
||||
@@ -17,7 +17,7 @@ import { useNotifications } from 'hooks/useNotifications';
|
||||
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
|
||||
import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi';
|
||||
import { isEmpty } from 'lodash-es';
|
||||
import { useDashboardStore } from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
@@ -33,7 +33,7 @@ const useCreateAlerts = (widget?: Widgets, caller?: string): VoidFunction => {
|
||||
|
||||
const { notifications } = useNotifications();
|
||||
|
||||
const { selectedDashboard } = useDashboardStore();
|
||||
const { selectedDashboard } = useDashboard();
|
||||
|
||||
const { dashboardVariables } = useDashboardVariables();
|
||||
const dashboardDynamicVariables = useDashboardVariablesByType(
|
||||
|
||||
frontend/src/pages/DashboardPage/DashboardPage.tsx (new file, 40 lines)
@@ -0,0 +1,40 @@
|
||||
import { useEffect } from 'react';
|
||||
import { Typography } from 'antd';
|
||||
import { AxiosError } from 'axios';
|
||||
import NotFound from 'components/NotFound';
|
||||
import Spinner from 'components/Spinner';
|
||||
import DashboardContainer from 'container/DashboardContainer';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { ErrorType } from 'types/common';
|
||||
|
||||
function DashboardPage(): JSX.Element {
|
||||
const { dashboardResponse } = useDashboard();
|
||||
|
||||
const { isFetching, isError, isLoading } = dashboardResponse;
|
||||
|
||||
const errorMessage = isError
|
||||
? (dashboardResponse?.error as AxiosError<{ errorType: string }>)?.response
|
||||
?.data?.errorType
|
||||
: 'Something went wrong';
|
||||
|
||||
useEffect(() => {
|
||||
const dashboardTitle = dashboardResponse.data?.data.data.title;
|
||||
document.title = dashboardTitle || document.title;
|
||||
}, [dashboardResponse.data?.data.data.title, isFetching]);
|
||||
|
||||
if (isError && !isFetching && errorMessage === ErrorType.NotFound) {
|
||||
return <NotFound />;
|
||||
}
|
||||
|
||||
if (isError && errorMessage) {
|
||||
return <Typography>{errorMessage}</Typography>;
|
||||
}
|
||||
|
||||
if (isLoading) {
|
||||
return <Spinner tip="Loading.." />;
|
||||
}
|
||||
|
||||
return <DashboardContainer />;
|
||||
}
|
||||
|
||||
export default DashboardPage;
|
||||
@@ -1,56 +1,16 @@
|
||||
import { useEffect } from 'react';
|
||||
import { useParams } from 'react-router-dom';
|
||||
import { Modal, Typography } from 'antd';
|
||||
import { AxiosError } from 'axios';
|
||||
import NotFound from 'components/NotFound';
|
||||
import Spinner from 'components/Spinner';
|
||||
import DashboardContainer from 'container/DashboardContainer';
|
||||
import { useDashboardBootstrap } from 'hooks/dashboard/useDashboardBootstrap';
|
||||
import { useDashboardStore } from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { ErrorType } from 'types/common';
|
||||
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
|
||||
|
||||
function DashboardPage(): JSX.Element {
|
||||
import DashboardPage from './DashboardPage';
|
||||
|
||||
function DashboardPageWithProvider(): JSX.Element {
|
||||
const { dashboardId } = useParams<{ dashboardId: string }>();
|
||||
|
||||
const [onModal, Content] = Modal.useModal();
|
||||
|
||||
const {
|
||||
isLoading,
|
||||
isError,
|
||||
isFetching,
|
||||
error,
|
||||
} = useDashboardBootstrap(dashboardId, { confirm: onModal.confirm });
|
||||
|
||||
const dashboardTitle = useDashboardStore(
|
||||
(s) => s.selectedDashboard?.data.title,
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
document.title = dashboardTitle || document.title;
|
||||
}, [dashboardTitle]);
|
||||
|
||||
const errorMessage = isError
|
||||
? (error as AxiosError<{ errorType: string }>)?.response?.data?.errorType
|
||||
: 'Something went wrong';
|
||||
|
||||
if (isError && !isFetching && errorMessage === ErrorType.NotFound) {
|
||||
return <NotFound />;
|
||||
}
|
||||
|
||||
if (isError && errorMessage) {
|
||||
return <Typography>{errorMessage}</Typography>;
|
||||
}
|
||||
|
||||
if (isLoading) {
|
||||
return <Spinner tip="Loading.." />;
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
{Content}
|
||||
<DashboardContainer />
|
||||
</>
|
||||
<DashboardProvider dashboardId={dashboardId}>
|
||||
<DashboardPage />
|
||||
</DashboardProvider>
|
||||
);
|
||||
}
|
||||
|
||||
export default DashboardPage;
|
||||
export default DashboardPageWithProvider;
|
||||
|
||||
frontend/src/providers/Dashboard/Dashboard.tsx (new file, 365 lines)
@@ -0,0 +1,365 @@
|
||||
import {
|
||||
// eslint-disable-next-line no-restricted-imports
|
||||
createContext,
|
||||
PropsWithChildren,
|
||||
// eslint-disable-next-line no-restricted-imports
|
||||
useContext,
|
||||
useEffect,
|
||||
useMemo,
|
||||
useRef,
|
||||
useState,
|
||||
} from 'react';
|
||||
import { Layout } from 'react-grid-layout';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useMutation, useQuery, UseQueryResult } from 'react-query';
|
||||
// eslint-disable-next-line no-restricted-imports
|
||||
import { useDispatch, useSelector } from 'react-redux';
|
||||
import { Modal } from 'antd';
|
||||
import getDashboard from 'api/v1/dashboards/id/get';
|
||||
import locked from 'api/v1/dashboards/id/lock';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import dayjs, { Dayjs } from 'dayjs';
|
||||
import { useTransformDashboardVariables } from 'hooks/dashboard/useTransformDashboardVariables';
|
||||
import useTabVisibility from 'hooks/useTabFocus';
|
||||
import { getUpdatedLayout } from 'lib/dashboard/getUpdatedLayout';
|
||||
import { getMinMaxForSelectedTime } from 'lib/getMinMax';
|
||||
import { defaultTo } from 'lodash-es';
|
||||
import isEqual from 'lodash-es/isEqual';
|
||||
import isUndefined from 'lodash-es/isUndefined';
|
||||
import omitBy from 'lodash-es/omitBy';
|
||||
import { useAppContext } from 'providers/App/App';
|
||||
import { initializeDefaultVariables } from 'providers/Dashboard/initializeDefaultVariables';
|
||||
import { useErrorModal } from 'providers/ErrorModalProvider';
|
||||
// eslint-disable-next-line no-restricted-imports
|
||||
import { Dispatch } from 'redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import AppActions from 'types/actions';
|
||||
import { UPDATE_TIME_INTERVAL } from 'types/actions/globalTime';
|
||||
import { SuccessResponseV2 } from 'types/api';
|
||||
import { Dashboard } from 'types/api/dashboard/getAll';
|
||||
import APIError from 'types/api/error';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
import {
|
||||
DASHBOARD_CACHE_TIME,
|
||||
DASHBOARD_CACHE_TIME_ON_REFRESH_ENABLED,
|
||||
} from '../../constants/queryCacheTime';
|
||||
import { useDashboardVariablesSelector } from '../../hooks/dashboard/useDashboardVariables';
|
||||
import {
|
||||
setDashboardVariablesStore,
|
||||
updateDashboardVariablesStore,
|
||||
} from './store/dashboardVariables/dashboardVariablesStore';
|
||||
import { IDashboardContext, WidgetColumnWidths } from './types';
|
||||
import { sortLayout } from './util';
|
||||
|
||||
export const DashboardContext = createContext<IDashboardContext>({
|
||||
isDashboardLocked: false,
|
||||
handleDashboardLockToggle: () => {},
|
||||
dashboardResponse: {} as UseQueryResult<
|
||||
SuccessResponseV2<Dashboard>,
|
||||
APIError
|
||||
>,
|
||||
selectedDashboard: {} as Dashboard,
|
||||
layouts: [],
|
||||
panelMap: {},
|
||||
setPanelMap: () => {},
|
||||
|
||||
setLayouts: () => {},
|
||||
setSelectedDashboard: () => {},
|
||||
updatedTimeRef: {} as React.MutableRefObject<Dayjs | null>,
|
||||
updateLocalStorageDashboardVariables: () => {},
|
||||
dashboardQueryRangeCalled: false,
|
||||
setDashboardQueryRangeCalled: () => {},
|
||||
isDashboardFetching: false,
|
||||
columnWidths: {},
|
||||
setColumnWidths: () => {},
|
||||
});
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
export function DashboardProvider({
|
||||
children,
|
||||
dashboardId,
|
||||
}: PropsWithChildren<{ dashboardId: string }>): JSX.Element {
|
||||
const [isDashboardLocked, setIsDashboardLocked] = useState<boolean>(false);
|
||||
|
||||
const [
|
||||
dashboardQueryRangeCalled,
|
||||
setDashboardQueryRangeCalled,
|
||||
] = useState<boolean>(false);
|
||||
|
||||
const { showErrorModal } = useErrorModal();
|
||||
|
||||
const dispatch = useDispatch<Dispatch<AppActions>>();
|
||||
|
||||
const globalTime = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
|
||||
const [onModal, Content] = Modal.useModal();
|
||||
|
||||
const [layouts, setLayouts] = useState<Layout[]>([]);
|
||||
|
||||
const [panelMap, setPanelMap] = useState<
|
||||
Record<string, { widgets: Layout[]; collapsed: boolean }>
|
||||
>({});
|
||||
|
||||
const { isLoggedIn } = useAppContext();
|
||||
|
||||
const [selectedDashboard, setSelectedDashboard] = useState<Dashboard>();
|
||||
const dashboardVariables = useDashboardVariablesSelector((s) => s.variables);
|
||||
const savedDashboardId = useDashboardVariablesSelector((s) => s.dashboardId);
|
||||
|
||||
useEffect(() => {
|
||||
const existingVariables = dashboardVariables;
|
||||
const updatedVariables = selectedDashboard?.data.variables || {};
|
||||
|
||||
if (savedDashboardId !== dashboardId) {
|
||||
setDashboardVariablesStore({
|
||||
dashboardId,
|
||||
variables: updatedVariables,
|
||||
});
|
||||
} else if (!isEqual(existingVariables, updatedVariables)) {
|
||||
updateDashboardVariablesStore({
|
||||
dashboardId,
|
||||
variables: updatedVariables,
|
||||
});
|
||||
}
|
||||
}, [selectedDashboard]);
|
||||
|
||||
const {
|
||||
currentDashboard,
|
||||
updateLocalStorageDashboardVariables,
|
||||
getUrlVariables,
|
||||
updateUrlVariable,
|
||||
transformDashboardVariables,
|
||||
} = useTransformDashboardVariables(dashboardId);
|
||||
|
||||
const updatedTimeRef = useRef<Dayjs | null>(null); // Using ref to store the updated time
|
||||
const modalRef = useRef<any>(null);
|
||||
|
||||
const isVisible = useTabVisibility();
|
||||
|
||||
const { t } = useTranslation(['dashboard']);
|
||||
const dashboardRef = useRef<Dashboard>();
|
||||
|
||||
const [isDashboardFetching, setIsDashboardFetching] = useState<boolean>(false);
|
||||
|
||||
const dashboardResponse = useQuery(
|
||||
[
|
||||
REACT_QUERY_KEY.DASHBOARD_BY_ID,
|
||||
dashboardId,
|
||||
globalTime.isAutoRefreshDisabled,
|
||||
],
|
||||
{
|
||||
enabled: !!dashboardId && isLoggedIn,
|
||||
queryFn: async () => {
|
||||
setIsDashboardFetching(true);
|
||||
try {
|
||||
return await getDashboard({
|
||||
id: dashboardId,
|
||||
});
|
||||
} catch (error) {
|
||||
showErrorModal(error as APIError);
|
||||
return;
|
||||
} finally {
|
||||
setIsDashboardFetching(false);
|
||||
}
|
||||
},
|
||||
refetchOnWindowFocus: false,
|
||||
cacheTime: globalTime.isAutoRefreshDisabled
|
||||
? DASHBOARD_CACHE_TIME
|
||||
: DASHBOARD_CACHE_TIME_ON_REFRESH_ENABLED,
|
||||
onError: (error) => {
|
||||
showErrorModal(error as APIError);
|
||||
},
|
||||
|
||||
onSuccess: (data: SuccessResponseV2<Dashboard>) => {
|
||||
const updatedDashboardData = transformDashboardVariables(data?.data);
|
||||
|
||||
// initialize URL variables after dashboard state is set to avoid race conditions
|
||||
const variables = updatedDashboardData?.data?.variables;
|
||||
if (variables) {
|
||||
initializeDefaultVariables(variables, getUrlVariables, updateUrlVariable);
|
||||
}
|
||||
|
||||
const updatedDate = dayjs(updatedDashboardData?.updatedAt);
|
||||
|
||||
setIsDashboardLocked(updatedDashboardData?.locked || false);
|
||||
|
||||
// on first render
|
||||
if (updatedTimeRef.current === null) {
|
||||
setSelectedDashboard(updatedDashboardData);
|
||||
|
||||
updatedTimeRef.current = updatedDate;
|
||||
|
||||
dashboardRef.current = updatedDashboardData;
|
||||
|
||||
setLayouts(
|
||||
sortLayout(getUpdatedLayout(updatedDashboardData?.data.layout)),
|
||||
);
|
||||
|
||||
setPanelMap(defaultTo(updatedDashboardData?.data?.panelMap, {}));
|
||||
}
|
||||
|
||||
if (
|
||||
updatedTimeRef.current !== null &&
|
||||
updatedDate.isAfter(updatedTimeRef.current) &&
|
||||
isVisible &&
|
||||
dashboardRef.current?.id === updatedDashboardData?.id
|
||||
) {
|
||||
// show modal when state is out of sync
|
||||
const modal = onModal.confirm({
|
||||
centered: true,
|
||||
title: t('dashboard_has_been_updated'),
|
||||
content: t('do_you_want_to_refresh_the_dashboard'),
|
||||
onOk() {
|
||||
setSelectedDashboard(updatedDashboardData);
|
||||
|
||||
const { maxTime, minTime } = getMinMaxForSelectedTime(
|
||||
globalTime.selectedTime,
|
||||
globalTime.minTime,
|
||||
globalTime.maxTime,
|
||||
);
|
||||
|
||||
dispatch({
|
||||
type: UPDATE_TIME_INTERVAL,
|
||||
payload: {
|
||||
maxTime,
|
||||
minTime,
|
||||
selectedTime: globalTime.selectedTime,
|
||||
},
|
||||
});
|
||||
|
||||
dashboardRef.current = updatedDashboardData;
|
||||
|
||||
updatedTimeRef.current = dayjs(updatedDashboardData?.updatedAt);
|
||||
|
||||
setLayouts(
|
||||
sortLayout(getUpdatedLayout(updatedDashboardData?.data.layout)),
|
||||
);
|
||||
|
||||
setPanelMap(defaultTo(updatedDashboardData?.data.panelMap, {}));
|
||||
},
|
||||
});
|
||||
|
||||
modalRef.current = modal;
|
||||
} else {
|
||||
// normal flow
|
||||
updatedTimeRef.current = dayjs(updatedDashboardData?.updatedAt);
|
||||
|
||||
dashboardRef.current = updatedDashboardData;
|
||||
|
||||
if (!isEqual(selectedDashboard, updatedDashboardData)) {
|
||||
setSelectedDashboard(updatedDashboardData);
|
||||
}
|
||||
|
||||
if (
|
||||
!isEqual(
|
||||
[omitBy(layouts, (value): boolean => isUndefined(value))[0]],
|
||||
updatedDashboardData?.data.layout,
|
||||
)
|
||||
) {
|
||||
setLayouts(
|
||||
sortLayout(getUpdatedLayout(updatedDashboardData?.data.layout)),
|
||||
);
|
||||
|
||||
setPanelMap(defaultTo(updatedDashboardData?.data.panelMap, {}));
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
// make the call on tab visibility only if the user is on dashboard / widget page
|
||||
if (isVisible && updatedTimeRef.current && !!dashboardId) {
|
||||
dashboardResponse.refetch();
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [isVisible]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!isVisible && modalRef.current) {
|
||||
modalRef.current.destroy();
|
||||
}
|
||||
}, [isVisible]);
|
||||
|
||||
const { mutate: lockDashboard } = useMutation(locked, {
|
||||
onSuccess: (_, props) => {
|
||||
setIsDashboardLocked(props.lock);
|
||||
},
|
||||
onError: (error) => {
|
||||
showErrorModal(error as APIError);
|
||||
},
|
||||
});
|
||||
|
||||
const handleDashboardLockToggle = async (value: boolean): Promise<void> => {
|
||||
if (selectedDashboard) {
|
||||
try {
|
||||
await lockDashboard({
|
||||
id: selectedDashboard.id,
|
||||
lock: value,
|
||||
});
|
||||
} catch (error) {
|
||||
showErrorModal(error as APIError);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const [columnWidths, setColumnWidths] = useState<WidgetColumnWidths>({});
|
||||
|
||||
const value: IDashboardContext = useMemo(
|
||||
() => ({
|
||||
isDashboardLocked,
|
||||
handleDashboardLockToggle,
|
||||
dashboardResponse,
|
||||
selectedDashboard,
|
||||
dashboardId,
|
||||
layouts,
|
||||
panelMap,
|
||||
setLayouts,
|
||||
setPanelMap,
|
||||
setSelectedDashboard,
|
||||
updatedTimeRef,
|
||||
updateLocalStorageDashboardVariables,
|
||||
dashboardQueryRangeCalled,
|
||||
setDashboardQueryRangeCalled,
|
||||
isDashboardFetching,
|
||||
columnWidths,
|
||||
setColumnWidths,
|
||||
}),
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[
|
||||
isDashboardLocked,
|
||||
dashboardResponse,
|
||||
selectedDashboard,
|
||||
dashboardId,
|
||||
layouts,
|
||||
panelMap,
|
||||
updateLocalStorageDashboardVariables,
|
||||
currentDashboard,
|
||||
dashboardQueryRangeCalled,
|
||||
setDashboardQueryRangeCalled,
|
||||
isDashboardFetching,
|
||||
columnWidths,
|
||||
setColumnWidths,
|
||||
],
|
||||
);
|
||||
|
||||
return (
|
||||
<DashboardContext.Provider value={value}>
|
||||
{Content}
|
||||
{children}
|
||||
</DashboardContext.Provider>
|
||||
);
|
||||
}
|
||||
|
||||
export const useDashboard = (): IDashboardContext => {
|
||||
const context = useContext(DashboardContext);
|
||||
|
||||
if (!context) {
|
||||
throw new Error('useDashboard must be used within a DashboardProvider');
|
||||
}
|
||||
|
||||
return context;
|
||||
};
|
||||
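A condensed sketch of the provider/consumer pairing defined above; the wrapped page component is whichever screen needs dashboard context:

// Mount the provider around a dashboard screen
<DashboardProvider dashboardId={dashboardId}>
  <DashboardPage />
</DashboardProvider>

// Read context anywhere beneath it
const {
  selectedDashboard,
  isDashboardLocked,
  handleDashboardLockToggle,
  dashboardResponse,
} = useDashboard();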
@@ -1,4 +1,3 @@
|
||||
import { ReactNode } from 'react';
|
||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||
// eslint-disable-next-line no-restricted-imports
|
||||
import { useSelector } from 'react-redux';
|
||||
@@ -7,20 +6,7 @@ import { render, RenderResult, screen, waitFor } from '@testing-library/react';
|
||||
import getDashboard from 'api/v1/dashboards/id/get';
|
||||
import { DASHBOARD_CACHE_TIME_ON_REFRESH_ENABLED } from 'constants/queryCacheTime';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import { useDashboardBootstrap } from 'hooks/dashboard/useDashboardBootstrap';
|
||||
|
||||
function DashboardBootstrapWrapper({
|
||||
dashboardId,
|
||||
children,
|
||||
}: {
|
||||
dashboardId: string;
|
||||
children: ReactNode;
|
||||
}): JSX.Element {
|
||||
useDashboardBootstrap(dashboardId);
|
||||
// eslint-disable-next-line react/jsx-no-useless-fragment
|
||||
return <>{children}</>;
|
||||
}
|
||||
import { useDashboardStore } from 'providers/Dashboard/store/useDashboardStore';
|
||||
import { DashboardProvider, useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
|
||||
import { useDashboardVariables } from '../../../hooks/dashboard/useDashboardVariables';
|
||||
@@ -69,12 +55,17 @@ jest.mock('react-redux', () => ({
|
||||
jest.mock('uuid', () => ({ v4: jest.fn(() => 'mock-uuid') }));
|
||||
|
||||
function TestComponent(): JSX.Element {
|
||||
const { selectedDashboard } = useDashboardStore();
|
||||
const { dashboardResponse, selectedDashboard } = useDashboard();
|
||||
const { dashboardVariables } = useDashboardVariables();
|
||||
|
||||
return (
|
||||
<div>
|
||||
<div data-testid="dashboard-id">{selectedDashboard?.id}</div>
|
||||
<div data-testid="query-status">{dashboardResponse.status}</div>
|
||||
<div data-testid="is-loading">{dashboardResponse.isLoading.toString()}</div>
|
||||
<div data-testid="is-fetching">
|
||||
{dashboardResponse.isFetching.toString()}
|
||||
</div>
|
||||
<div data-testid="dashboard-variables">
|
||||
{dashboardVariables ? JSON.stringify(dashboardVariables) : 'null'}
|
||||
</div>
|
||||
@@ -98,7 +89,7 @@ function createTestQueryClient(): QueryClient {
|
||||
}
|
||||
|
||||
// Helper to render with dashboard provider
|
||||
function renderWithDashboardBootstrap(
|
||||
function renderWithDashboardProvider(
|
||||
dashboardId = 'test-dashboard-id',
|
||||
): RenderResult {
|
||||
const queryClient = createTestQueryClient();
|
||||
@@ -107,9 +98,9 @@ function renderWithDashboardBootstrap(
|
||||
return render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<MemoryRouter initialEntries={[initialRoute]}>
|
||||
<DashboardBootstrapWrapper dashboardId={dashboardId}>
|
||||
<DashboardProvider dashboardId={dashboardId}>
|
||||
<TestComponent />
|
||||
</DashboardBootstrapWrapper>
|
||||
</DashboardProvider>
|
||||
</MemoryRouter>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
@@ -181,7 +172,7 @@ describe('Dashboard Provider - Query Key with Route Params', () => {
|
||||
describe('Query Key Behavior', () => {
|
||||
it('should include route params in query key when on dashboard page', async () => {
|
||||
const dashboardId = 'test-dashboard-id';
|
||||
renderWithDashboardBootstrap(dashboardId);
|
||||
renderWithDashboardProvider(dashboardId);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockGetDashboard).toHaveBeenCalledWith({ id: dashboardId });
|
||||
@@ -196,7 +187,7 @@ describe('Dashboard Provider - Query Key with Route Params', () => {
|
||||
const newDashboardId = 'new-dashboard-id';
|
||||
|
||||
// First render with initial dashboard ID
|
||||
const { rerender } = renderWithDashboardBootstrap(initialDashboardId);
|
||||
const { rerender } = renderWithDashboardProvider(initialDashboardId);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockGetDashboard).toHaveBeenCalledWith({ id: initialDashboardId });
|
||||
@@ -206,9 +197,9 @@ describe('Dashboard Provider - Query Key with Route Params', () => {
|
||||
rerender(
|
||||
<QueryClientProvider client={createTestQueryClient()}>
|
||||
<MemoryRouter initialEntries={[`/dashboard/${newDashboardId}`]}>
|
||||
<DashboardBootstrapWrapper dashboardId={newDashboardId}>
|
||||
<DashboardProvider dashboardId={newDashboardId}>
|
||||
<TestComponent />
|
||||
</DashboardBootstrapWrapper>
|
||||
</DashboardProvider>
|
||||
</MemoryRouter>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
@@ -222,7 +213,7 @@ describe('Dashboard Provider - Query Key with Route Params', () => {
|
||||
});
|
||||
|
||||
it('should not fetch when no dashboardId is provided', () => {
|
||||
renderWithDashboardBootstrap('');
|
||||
renderWithDashboardProvider('');
|
||||
|
||||
// Should not call the API
|
||||
expect(mockGetDashboard).not.toHaveBeenCalled();
|
||||
@@ -238,9 +229,9 @@ describe('Dashboard Provider - Query Key with Route Params', () => {
|
||||
const { rerender } = render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<MemoryRouter initialEntries={[`/dashboard/${dashboardId1}`]}>
|
||||
<DashboardBootstrapWrapper dashboardId={dashboardId1}>
|
||||
<DashboardProvider dashboardId={dashboardId1}>
|
||||
<TestComponent />
|
||||
</DashboardBootstrapWrapper>
|
||||
</DashboardProvider>
|
||||
</MemoryRouter>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
@@ -252,9 +243,9 @@ describe('Dashboard Provider - Query Key with Route Params', () => {
|
||||
rerender(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<MemoryRouter initialEntries={[`/dashboard/${dashboardId2}`]}>
|
||||
<DashboardBootstrapWrapper dashboardId={dashboardId2}>
|
||||
<DashboardProvider dashboardId={dashboardId2}>
|
||||
<TestComponent />
|
||||
</DashboardBootstrapWrapper>
|
||||
</DashboardProvider>
|
||||
</MemoryRouter>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
@@ -295,9 +286,9 @@ describe('Dashboard Provider - Query Key with Route Params', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<MemoryRouter initialEntries={[`/dashboard/${dashboardId}`]}>
|
||||
<DashboardBootstrapWrapper dashboardId={dashboardId}>
|
||||
<DashboardProvider dashboardId={dashboardId}>
|
||||
<TestComponent />
|
||||
</DashboardBootstrapWrapper>
|
||||
</DashboardProvider>
|
||||
</MemoryRouter>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
@@ -374,7 +365,7 @@ describe('Dashboard Provider - URL Variables Integration', () => {
|
||||
// Empty URL variables - tests initialization flow
|
||||
mockGetUrlVariables.mockReturnValue({});
|
||||
|
||||
renderWithDashboardBootstrap(DASHBOARD_ID);
|
||||
renderWithDashboardProvider(DASHBOARD_ID);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
|
||||
@@ -430,7 +421,7 @@ describe('Dashboard Provider - URL Variables Integration', () => {
|
||||
.mockReturnValueOnce('development')
|
||||
.mockReturnValueOnce(['db', 'cache']);
|
||||
|
||||
renderWithDashboardBootstrap(DASHBOARD_ID);
|
||||
renderWithDashboardProvider(DASHBOARD_ID);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
|
||||
@@ -490,7 +481,7 @@ describe('Dashboard Provider - URL Variables Integration', () => {
|
||||
|
||||
mockGetUrlVariables.mockReturnValue(urlVariables);
|
||||
|
||||
renderWithDashboardBootstrap(DASHBOARD_ID);
|
||||
renderWithDashboardProvider(DASHBOARD_ID);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
|
||||
@@ -526,7 +517,7 @@ describe('Dashboard Provider - URL Variables Integration', () => {
|
||||
.mockReturnValueOnce('development')
|
||||
.mockReturnValueOnce(['api']);
|
||||
|
||||
renderWithDashboardBootstrap(DASHBOARD_ID);
|
||||
renderWithDashboardProvider(DASHBOARD_ID);
|
||||
|
||||
await waitFor(() => {
|
||||
// Verify normalization was called with the specific values and variable configs
|
||||
@@ -593,7 +584,7 @@ describe('Dashboard Provider - Textbox Variable Backward Compatibility', () => {
|
||||
} as any);
|
||||
/* eslint-enable @typescript-eslint/no-explicit-any */
|
||||
|
||||
renderWithDashboardBootstrap(DASHBOARD_ID);
|
||||
renderWithDashboardProvider(DASHBOARD_ID);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
|
||||
@@ -635,7 +626,7 @@ describe('Dashboard Provider - Textbox Variable Backward Compatibility', () => {
|
||||
} as any);
|
||||
/* eslint-enable @typescript-eslint/no-explicit-any */
|
||||
|
||||
renderWithDashboardBootstrap(DASHBOARD_ID);
|
||||
renderWithDashboardProvider(DASHBOARD_ID);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
|
||||
@@ -678,7 +669,7 @@ describe('Dashboard Provider - Textbox Variable Backward Compatibility', () => {
|
||||
} as any);
|
||||
/* eslint-enable @typescript-eslint/no-explicit-any */
|
||||
|
||||
renderWithDashboardBootstrap(DASHBOARD_ID);
|
||||
renderWithDashboardProvider(DASHBOARD_ID);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
|
||||
@@ -720,7 +711,7 @@ describe('Dashboard Provider - Textbox Variable Backward Compatibility', () => {
|
||||
} as any);
|
||||
/* eslint-enable @typescript-eslint/no-explicit-any */
|
||||
|
||||
renderWithDashboardBootstrap(DASHBOARD_ID);
|
||||
renderWithDashboardProvider(DASHBOARD_ID);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
|
||||
|
||||
@@ -1,51 +0,0 @@
|
||||
import type { Layout } from 'react-grid-layout';
|
||||
import type { StateCreator } from 'zustand';
|
||||
|
||||
import type { DashboardStore } from '../useDashboardStore';
|
||||
|
||||
export interface DashboardLayoutSlice {
|
||||
//
|
||||
layouts: Layout[];
|
||||
setLayouts: (updater: Layout[] | ((prev: Layout[]) => Layout[])) => void;
|
||||
//
|
||||
panelMap: Record<string, { widgets: Layout[]; collapsed: boolean }>;
|
||||
setPanelMap: (
|
||||
updater:
|
||||
| Record<string, { widgets: Layout[]; collapsed: boolean }>
|
||||
| ((
|
||||
prev: Record<string, { widgets: Layout[]; collapsed: boolean }>,
|
||||
) => Record<string, { widgets: Layout[]; collapsed: boolean }>),
|
||||
) => void;
|
||||
// resetDashboardLayout: () => void;
|
||||
}
|
||||
|
||||
export const initialDashboardLayoutState = {
|
||||
layouts: [] as Layout[],
|
||||
panelMap: {} as Record<string, { widgets: Layout[]; collapsed: boolean }>,
|
||||
};
|
||||
|
||||
export const createDashboardLayoutSlice: StateCreator<
|
||||
DashboardStore,
|
||||
[['zustand/immer', never]],
|
||||
[],
|
||||
DashboardLayoutSlice
|
||||
> = (set) => ({
|
||||
...initialDashboardLayoutState,
|
||||
|
||||
setLayouts: (updater): void =>
|
||||
set((state) => {
|
||||
state.layouts =
|
||||
typeof updater === 'function' ? updater(state.layouts) : updater;
|
||||
}),
|
||||
|
||||
setPanelMap: (updater): void =>
|
||||
set((state) => {
|
||||
state.panelMap =
|
||||
typeof updater === 'function' ? updater(state.panelMap) : updater;
|
||||
}),
|
||||
|
||||
// resetDashboardLayout: () =>
|
||||
// set((state) => {
|
||||
// Object.assign(state, initialDashboardLayoutState);
|
||||
// }),
|
||||
});
|
||||
@@ -1,57 +0,0 @@
|
||||
import type { Dashboard } from 'types/api/dashboard/getAll';
|
||||
import type { StateCreator } from 'zustand';
|
||||
|
||||
import type { DashboardStore } from '../useDashboardStore';
|
||||
|
||||
export type WidgetColumnWidths = {
|
||||
[widgetId: string]: Record<string, number>;
|
||||
};
|
||||
|
||||
export interface DashboardUISlice {
|
||||
//
|
||||
selectedDashboard: Dashboard | undefined;
|
||||
setSelectedDashboard: (
|
||||
updater:
|
||||
| Dashboard
|
||||
| undefined
|
||||
| ((prev: Dashboard | undefined) => Dashboard | undefined),
|
||||
) => void;
|
||||
//
|
||||
columnWidths: WidgetColumnWidths;
|
||||
setColumnWidths: (
|
||||
updater:
|
||||
| WidgetColumnWidths
|
||||
| ((prev: WidgetColumnWidths) => WidgetColumnWidths),
|
||||
) => void;
|
||||
}
|
||||
|
||||
export const initialDashboardUIState = {
|
||||
selectedDashboard: undefined as Dashboard | undefined,
|
||||
columnWidths: {} as WidgetColumnWidths,
|
||||
};
|
||||
|
||||
export const createDashboardUISlice: StateCreator<
|
||||
DashboardStore,
|
||||
[['zustand/immer', never]],
|
||||
[],
|
||||
DashboardUISlice
|
||||
> = (set) => ({
|
||||
...initialDashboardUIState,
|
||||
|
||||
setSelectedDashboard: (updater): void =>
|
||||
set((state: DashboardUISlice): void => {
|
||||
state.selectedDashboard =
|
||||
typeof updater === 'function' ? updater(state.selectedDashboard) : updater;
|
||||
}),
|
||||
|
||||
setColumnWidths: (updater): void =>
|
||||
set((state: DashboardUISlice): void => {
|
||||
state.columnWidths =
|
||||
typeof updater === 'function' ? updater(state.columnWidths) : updater;
|
||||
}),
|
||||
|
||||
resetDashboardUI: (): void =>
|
||||
set((state: DashboardUISlice): void => {
|
||||
Object.assign(state, initialDashboardUIState);
|
||||
}),
|
||||
});
|
||||
@@ -1,50 +0,0 @@
|
||||
import type { Layout } from 'react-grid-layout';
|
||||
import type { Dashboard } from 'types/api/dashboard/getAll';
|
||||
import { create } from 'zustand';
|
||||
import { immer } from 'zustand/middleware/immer';
|
||||
|
||||
import {
|
||||
createDashboardLayoutSlice,
|
||||
DashboardLayoutSlice,
|
||||
initialDashboardLayoutState,
|
||||
} from './slices/dashboardLayoutSlice';
|
||||
import {
|
||||
createDashboardUISlice,
|
||||
DashboardUISlice,
|
||||
initialDashboardUIState,
|
||||
} from './slices/dashboardUISlice';
|
||||
|
||||
export type DashboardStore = DashboardUISlice &
|
||||
DashboardLayoutSlice & {
|
||||
resetDashboardStore: () => void;
|
||||
};
|
||||
|
||||
/**
|
||||
* 'select*' is a redux naming convention that can be carried over to zustand.
|
||||
* It is used to select a piece of state from the store.
|
||||
* In this case, we are selecting the locked state of the selected dashboard.
|
||||
* */
|
||||
export const selectIsDashboardLocked = (s: DashboardStore): boolean =>
|
||||
s.selectedDashboard?.locked ?? false;
|
||||
|
||||
export const useDashboardStore = create<DashboardStore>()(
|
||||
immer((set, get, api) => ({
|
||||
...createDashboardUISlice(set, get, api),
|
||||
...createDashboardLayoutSlice(set, get, api),
|
||||
|
||||
resetDashboardStore: (): void =>
|
||||
set((state: DashboardStore) => {
|
||||
Object.assign(state, initialDashboardUIState, initialDashboardLayoutState);
|
||||
}),
|
||||
})),
|
||||
);
|
||||
|
||||
// Standalone imperative accessors — use these instead of calling useDashboardStore.getState() at call sites.
|
||||
export const getSelectedDashboard = (): Dashboard | undefined =>
|
||||
useDashboardStore.getState().selectedDashboard;
|
||||
|
||||
export const getDashboardLayouts = (): Layout[] =>
|
||||
useDashboardStore.getState().layouts;
|
||||
|
||||
export const resetDashboard = (): void =>
|
||||
useDashboardStore.getState().resetDashboardStore();
|
||||
41
frontend/src/providers/Dashboard/types.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import { Layout } from 'react-grid-layout';
|
||||
import { UseQueryResult } from 'react-query';
|
||||
import dayjs from 'dayjs';
|
||||
import { SuccessResponseV2 } from 'types/api';
|
||||
import { Dashboard } from 'types/api/dashboard/getAll';
|
||||
|
||||
export type WidgetColumnWidths = {
|
||||
[widgetId: string]: Record<string, number>;
|
||||
};
|
||||
|
||||
export interface IDashboardContext {
|
||||
isDashboardLocked: boolean;
|
||||
handleDashboardLockToggle: (value: boolean) => void;
|
||||
dashboardResponse: UseQueryResult<SuccessResponseV2<Dashboard>, unknown>;
|
||||
selectedDashboard: Dashboard | undefined;
|
||||
layouts: Layout[];
|
||||
panelMap: Record<string, { widgets: Layout[]; collapsed: boolean }>;
|
||||
setPanelMap: React.Dispatch<React.SetStateAction<Record<string, any>>>;
|
||||
setLayouts: React.Dispatch<React.SetStateAction<Layout[]>>;
|
||||
setSelectedDashboard: React.Dispatch<
|
||||
React.SetStateAction<Dashboard | undefined>
|
||||
>;
|
||||
updatedTimeRef: React.MutableRefObject<dayjs.Dayjs | null>;
|
||||
updateLocalStorageDashboardVariables: (
|
||||
id: string,
|
||||
selectedValue:
|
||||
| string
|
||||
| number
|
||||
| boolean
|
||||
| (string | number | boolean)[]
|
||||
| null
|
||||
| undefined,
|
||||
allSelected: boolean,
|
||||
isDynamic?: boolean,
|
||||
) => void;
|
||||
dashboardQueryRangeCalled: boolean;
|
||||
setDashboardQueryRangeCalled: (value: boolean) => void;
|
||||
isDashboardFetching: boolean;
|
||||
columnWidths: WidgetColumnWidths;
|
||||
setColumnWidths: React.Dispatch<React.SetStateAction<WidgetColumnWidths>>;
|
||||
}
|
||||
4
go.mod
@@ -81,8 +81,6 @@ require (
	golang.org/x/oauth2 v0.34.0
	golang.org/x/sync v0.19.0
	golang.org/x/text v0.33.0
	gonum.org/v1/gonum v0.17.0
	google.golang.org/api v0.265.0
	google.golang.org/protobuf v1.36.11
	gopkg.in/yaml.v2 v2.4.0
	gopkg.in/yaml.v3 v3.0.1
@@ -379,6 +377,8 @@ require (
	golang.org/x/sys v0.40.0 // indirect
	golang.org/x/time v0.14.0 // indirect
	golang.org/x/tools v0.41.0 // indirect
	gonum.org/v1/gonum v0.17.0 // indirect
	google.golang.org/api v0.265.0
	google.golang.org/genproto/googleapis/api v0.0.0-20260203192932-546029d2fa20 // indirect
	google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409 // indirect
	google.golang.org/grpc v1.78.0 // indirect
|
||||
216
pkg/apiserver/signozapiserver/cloudintegration.go
Normal file
@@ -0,0 +1,216 @@
|
||||
package signozapiserver
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/http/handler"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
citypes "github.com/SigNoz/signoz/pkg/types/cloudintegrationtypes"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
func (provider *provider) addCloudIntegrationRoutes(router *mux.Router) error {
|
||||
if err := router.Handle("/api/v1/cloud_integrations/{cloud_provider}/accounts/connection_artifact", handler.New(
|
||||
provider.authZ.AdminAccess(provider.cloudIntegrationHandler.GetConnectionArtifact),
|
||||
handler.OpenAPIDef{
|
||||
ID: "GetConnectionArtifact",
|
||||
Tags: []string{"cloudintegration"},
|
||||
Summary: "Get connection artifact",
|
||||
Description: "This endpoint returns a connection artifact for the specified cloud provider and creates new cloud integration account",
|
||||
Request: new(citypes.PostableConnectionArtifact),
|
||||
RequestContentType: "application/json",
|
||||
Response: new(citypes.GettableConnectionArtifact),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
},
|
||||
)).Methods(http.MethodPost).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/cloud_integrations/{cloud_provider}/accounts", handler.New(
|
||||
provider.authZ.AdminAccess(provider.cloudIntegrationHandler.ListAccounts),
|
||||
handler.OpenAPIDef{
|
||||
ID: "ListAccounts",
|
||||
Tags: []string{"cloudintegration"},
|
||||
Summary: "List accounts",
|
||||
Description: "This endpoint lists the accounts for the specified cloud provider",
|
||||
Request: nil,
|
||||
RequestContentType: "",
|
||||
Response: new(citypes.GettableAccounts),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
},
|
||||
)).Methods(http.MethodGet).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/cloud_integrations/{cloud_provider}/accounts/{id}", handler.New(
|
||||
provider.authZ.AdminAccess(provider.cloudIntegrationHandler.GetAccount),
|
||||
handler.OpenAPIDef{
|
||||
ID: "GetAccount",
|
||||
Tags: []string{"cloudintegration"},
|
||||
Summary: "Get account",
|
||||
Description: "This endpoint gets an account for the specified cloud provider",
|
||||
Request: nil,
|
||||
RequestContentType: "",
|
||||
Response: new(citypes.GettableAccount),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusNotFound},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
},
|
||||
)).Methods(http.MethodGet).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/cloud_integrations/{cloud_provider}/accounts/{id}", handler.New(
|
||||
provider.authZ.AdminAccess(provider.cloudIntegrationHandler.UpdateAccount),
|
||||
handler.OpenAPIDef{
|
||||
ID: "UpdateAccount",
|
||||
Tags: []string{"cloudintegration"},
|
||||
Summary: "Update account",
|
||||
Description: "This endpoint updates an account for the specified cloud provider",
|
||||
Request: new(citypes.UpdatableAccount),
|
||||
RequestContentType: "application/json",
|
||||
Response: nil,
|
||||
ResponseContentType: "",
|
||||
SuccessStatusCode: http.StatusNoContent,
|
||||
ErrorStatusCodes: []int{},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
},
|
||||
)).Methods(http.MethodPut).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/cloud_integrations/{cloud_provider}/accounts/{id}", handler.New(
|
||||
provider.authZ.AdminAccess(provider.cloudIntegrationHandler.DisconnectAccount),
|
||||
handler.OpenAPIDef{
|
||||
ID: "DisconnectAccount",
|
||||
Tags: []string{"cloudintegration"},
|
||||
Summary: "Disconnect account",
|
||||
Description: "This endpoint disconnects an account for the specified cloud provider",
|
||||
Request: nil,
|
||||
RequestContentType: "",
|
||||
Response: nil,
|
||||
ResponseContentType: "",
|
||||
SuccessStatusCode: http.StatusNoContent,
|
||||
ErrorStatusCodes: []int{},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
},
|
||||
)).Methods(http.MethodDelete).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/cloud_integrations/{cloud_provider}/services", handler.New(
|
||||
provider.authZ.AdminAccess(provider.cloudIntegrationHandler.ListServicesMetadata),
|
||||
handler.OpenAPIDef{
|
||||
ID: "ListServicesMetadata",
|
||||
Tags: []string{"cloudintegration"},
|
||||
Summary: "List services metadata",
|
||||
Description: "This endpoint lists the services metadata for the specified cloud provider",
|
||||
Request: nil,
|
||||
RequestContentType: "",
|
||||
Response: new(citypes.GettableServicesMetadata),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
},
|
||||
)).Methods(http.MethodGet).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/cloud_integrations/{cloud_provider}/services/{service_id}", handler.New(
|
||||
provider.authZ.AdminAccess(provider.cloudIntegrationHandler.GetService),
|
||||
handler.OpenAPIDef{
|
||||
ID: "GetService",
|
||||
Tags: []string{"cloudintegration"},
|
||||
Summary: "Get service",
|
||||
Description: "This endpoint gets a service for the specified cloud provider",
|
||||
Request: nil,
|
||||
RequestContentType: "",
|
||||
Response: new(citypes.GettableService),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
},
|
||||
)).Methods(http.MethodGet).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/cloud_integrations/{cloud_provider}/services/{service_id}", handler.New(
|
||||
provider.authZ.AdminAccess(provider.cloudIntegrationHandler.UpdateService),
|
||||
handler.OpenAPIDef{
|
||||
ID: "UpdateService",
|
||||
Tags: []string{"cloudintegration"},
|
||||
Summary: "Update service",
|
||||
Description: "This endpoint updates a service for the specified cloud provider",
|
||||
Request: new(citypes.UpdatableService),
|
||||
RequestContentType: "application/json",
|
||||
Response: nil,
|
||||
ResponseContentType: "",
|
||||
SuccessStatusCode: http.StatusNoContent,
|
||||
ErrorStatusCodes: []int{},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
},
|
||||
)).Methods(http.MethodPut).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
	// The agent check-in endpoint path is kept the same as the older one to maintain backward compatibility with already-deployed agents.
	// In the future, it will be deprecated and a new endpoint will be introduced for consistency with the endpoints above.
|
||||
if err := router.Handle("/api/v1/cloud-integrations/{cloud_provider}/agent-check-in", handler.New(
|
||||
provider.authZ.ViewAccess(provider.cloudIntegrationHandler.AgentCheckIn),
|
||||
handler.OpenAPIDef{
|
||||
ID: "AgentCheckInDeprecated",
|
||||
Tags: []string{"cloudintegration"},
|
||||
Summary: "Agent check-in",
|
||||
Description: "[Deprecated] This endpoint is called by the deployed agent to check in",
|
||||
Request: new(citypes.PostableAgentCheckInRequest),
|
||||
RequestContentType: "application/json",
|
||||
Response: new(citypes.GettableAgentCheckInResponse),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{},
|
||||
Deprecated: true, // this endpoint will be deprecated in future
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleViewer), // agent role is viewer
|
||||
},
|
||||
)).Methods(http.MethodPost).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/cloud_integrations/{cloud_provider}/accounts/check_in", handler.New(
|
||||
provider.authZ.ViewAccess(provider.cloudIntegrationHandler.AgentCheckIn),
|
||||
handler.OpenAPIDef{
|
||||
ID: "AgentCheckIn",
|
||||
Tags: []string{"cloudintegration"},
|
||||
Summary: "Agent check-in",
|
||||
Description: "This endpoint is called by the deployed agent to check in",
|
||||
Request: new(citypes.PostableAgentCheckInRequest),
|
||||
RequestContentType: "application/json",
|
||||
Response: new(citypes.GettableAgentCheckInResponse),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleViewer), // agent role is viewer
|
||||
},
|
||||
)).Methods(http.MethodPost).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
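
For orientation, the sketch below shows how a client could call the ListAccounts route registered above. The path, method, and admin-level requirement come from the diff; the base URL, the API-key header name, and the response handling are illustrative assumptions only.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Base URL and credential are placeholders; adjust them for your deployment.
	baseURL := "http://localhost:8080"
	apiKey := "<your-admin-api-key>"

	// GET /api/v1/cloud_integrations/{cloud_provider}/accounts is the ListAccounts route above.
	req, err := http.NewRequest(http.MethodGet, baseURL+"/api/v1/cloud_integrations/aws/accounts", nil)
	if err != nil {
		panic(err)
	}
	// The header name is an assumption; the route only requires an admin-level identity.
	req.Header.Set("SIGNOZ-API-KEY", apiKey)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status)
	fmt.Println(string(body)) // JSON-encoded citypes.GettableAccounts on success
}
```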
|
||||
@@ -12,13 +12,13 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/http/handler"
|
||||
"github.com/SigNoz/signoz/pkg/http/middleware"
|
||||
"github.com/SigNoz/signoz/pkg/modules/authdomain"
|
||||
"github.com/SigNoz/signoz/pkg/modules/cloudintegration"
|
||||
"github.com/SigNoz/signoz/pkg/modules/dashboard"
|
||||
"github.com/SigNoz/signoz/pkg/modules/fields"
|
||||
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/preference"
|
||||
"github.com/SigNoz/signoz/pkg/modules/promote"
|
||||
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
|
||||
"github.com/SigNoz/signoz/pkg/modules/serviceaccount"
|
||||
"github.com/SigNoz/signoz/pkg/modules/session"
|
||||
"github.com/SigNoz/signoz/pkg/modules/user"
|
||||
@@ -48,11 +48,11 @@ type provider struct {
|
||||
gatewayHandler gateway.Handler
|
||||
fieldsHandler fields.Handler
|
||||
authzHandler authz.Handler
|
||||
rawDataExportHandler rawdataexport.Handler
|
||||
zeusHandler zeus.Handler
|
||||
querierHandler querier.Handler
|
||||
serviceAccountHandler serviceaccount.Handler
|
||||
factoryHandler factory.Handler
|
||||
// TODO: wire up later
|
||||
cloudIntegrationHandler cloudintegration.Handler //nolint:unused
|
||||
}
|
||||
|
||||
func NewFactory(
|
||||
@@ -72,11 +72,10 @@ func NewFactory(
|
||||
gatewayHandler gateway.Handler,
|
||||
fieldsHandler fields.Handler,
|
||||
authzHandler authz.Handler,
|
||||
rawDataExportHandler rawdataexport.Handler,
|
||||
zeusHandler zeus.Handler,
|
||||
querierHandler querier.Handler,
|
||||
serviceAccountHandler serviceaccount.Handler,
|
||||
factoryHandler factory.Handler,
|
||||
cloudIntegrationHandler cloudintegration.Handler,
|
||||
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
|
||||
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
|
||||
return newProvider(
|
||||
@@ -99,11 +98,10 @@ func NewFactory(
|
||||
gatewayHandler,
|
||||
fieldsHandler,
|
||||
authzHandler,
|
||||
rawDataExportHandler,
|
||||
zeusHandler,
|
||||
querierHandler,
|
||||
serviceAccountHandler,
|
||||
factoryHandler,
|
||||
cloudIntegrationHandler,
|
||||
)
|
||||
})
|
||||
}
|
||||
@@ -128,38 +126,36 @@ func newProvider(
|
||||
gatewayHandler gateway.Handler,
|
||||
fieldsHandler fields.Handler,
|
||||
authzHandler authz.Handler,
|
||||
rawDataExportHandler rawdataexport.Handler,
|
||||
zeusHandler zeus.Handler,
|
||||
querierHandler querier.Handler,
|
||||
serviceAccountHandler serviceaccount.Handler,
|
||||
factoryHandler factory.Handler,
|
||||
cloudIntegrationHandler cloudintegration.Handler,
|
||||
) (apiserver.APIServer, error) {
|
||||
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
|
||||
router := mux.NewRouter().UseEncodedPath()
|
||||
|
||||
provider := &provider{
|
||||
config: config,
|
||||
settings: settings,
|
||||
router: router,
|
||||
orgHandler: orgHandler,
|
||||
userHandler: userHandler,
|
||||
sessionHandler: sessionHandler,
|
||||
authDomainHandler: authDomainHandler,
|
||||
preferenceHandler: preferenceHandler,
|
||||
globalHandler: globalHandler,
|
||||
promoteHandler: promoteHandler,
|
||||
flaggerHandler: flaggerHandler,
|
||||
dashboardModule: dashboardModule,
|
||||
dashboardHandler: dashboardHandler,
|
||||
metricsExplorerHandler: metricsExplorerHandler,
|
||||
gatewayHandler: gatewayHandler,
|
||||
fieldsHandler: fieldsHandler,
|
||||
authzHandler: authzHandler,
|
||||
rawDataExportHandler: rawDataExportHandler,
|
||||
zeusHandler: zeusHandler,
|
||||
querierHandler: querierHandler,
|
||||
serviceAccountHandler: serviceAccountHandler,
|
||||
factoryHandler: factoryHandler,
|
||||
config: config,
|
||||
settings: settings,
|
||||
router: router,
|
||||
orgHandler: orgHandler,
|
||||
userHandler: userHandler,
|
||||
sessionHandler: sessionHandler,
|
||||
authDomainHandler: authDomainHandler,
|
||||
preferenceHandler: preferenceHandler,
|
||||
globalHandler: globalHandler,
|
||||
promoteHandler: promoteHandler,
|
||||
flaggerHandler: flaggerHandler,
|
||||
dashboardModule: dashboardModule,
|
||||
dashboardHandler: dashboardHandler,
|
||||
metricsExplorerHandler: metricsExplorerHandler,
|
||||
gatewayHandler: gatewayHandler,
|
||||
fieldsHandler: fieldsHandler,
|
||||
authzHandler: authzHandler,
|
||||
zeusHandler: zeusHandler,
|
||||
querierHandler: querierHandler,
|
||||
serviceAccountHandler: serviceAccountHandler,
|
||||
cloudIntegrationHandler: cloudIntegrationHandler,
|
||||
}
|
||||
|
||||
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
|
||||
@@ -232,10 +228,6 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := provider.addRawDataExportRoutes(router); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := provider.addZeusRoutes(router); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -248,7 +240,7 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := provider.addRegistryRoutes(router); err != nil {
|
||||
if err := provider.addCloudIntegrationRoutes(router); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
|
||||
@@ -1,33 +0,0 @@
|
||||
package signozapiserver
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/http/handler"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/exporttypes"
|
||||
v5 "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
func (provider *provider) addRawDataExportRoutes(router *mux.Router) error {
|
||||
|
||||
if err := router.Handle("/api/v1/export_raw_data", handler.New(provider.authZ.ViewAccess(provider.rawDataExportHandler.ExportRawData), handler.OpenAPIDef{
|
||||
ID: "HandleExportRawDataPOST",
|
||||
Tags: []string{"logs", "traces"},
|
||||
Summary: "Export raw data",
|
||||
		Description: "This endpoint allows complex queries for exporting raw data for traces and logs",
|
||||
Request: new(v5.QueryRangeRequest),
|
||||
RequestQuery: new(exporttypes.ExportRawDataFormatQueryParam),
|
||||
RequestContentType: "application/json",
|
||||
Response: nil,
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{http.StatusBadRequest},
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
|
||||
})).Methods(http.MethodPost).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -1,84 +0,0 @@
|
||||
package signozapiserver
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
pkghandler "github.com/SigNoz/signoz/pkg/http/handler"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/gorilla/mux"
|
||||
openapi "github.com/swaggest/openapi-go"
|
||||
)
|
||||
|
||||
type healthOpenAPIHandler struct {
|
||||
handlerFunc http.HandlerFunc
|
||||
id string
|
||||
summary string
|
||||
}
|
||||
|
||||
func newHealthOpenAPIHandler(handlerFunc http.HandlerFunc, id, summary string) pkghandler.Handler {
|
||||
return &healthOpenAPIHandler{
|
||||
handlerFunc: handlerFunc,
|
||||
id: id,
|
||||
summary: summary,
|
||||
}
|
||||
}
|
||||
|
||||
func (handler *healthOpenAPIHandler) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
|
||||
handler.handlerFunc.ServeHTTP(rw, req)
|
||||
}
|
||||
|
||||
func (handler *healthOpenAPIHandler) ServeOpenAPI(opCtx openapi.OperationContext) {
|
||||
opCtx.SetID(handler.id)
|
||||
opCtx.SetTags("health")
|
||||
opCtx.SetSummary(handler.summary)
|
||||
|
||||
response := render.SuccessResponse{
|
||||
Status: render.StatusSuccess.String(),
|
||||
Data: new(factory.Response),
|
||||
}
|
||||
|
||||
opCtx.AddRespStructure(
|
||||
response,
|
||||
openapi.WithContentType("application/json"),
|
||||
openapi.WithHTTPStatus(http.StatusOK),
|
||||
)
|
||||
opCtx.AddRespStructure(
|
||||
response,
|
||||
openapi.WithContentType("application/json"),
|
||||
openapi.WithHTTPStatus(http.StatusServiceUnavailable),
|
||||
)
|
||||
}
|
||||
|
||||
func (provider *provider) addRegistryRoutes(router *mux.Router) error {
|
||||
if err := router.Handle("/api/v2/healthz", newHealthOpenAPIHandler(
|
||||
provider.authZ.OpenAccess(provider.factoryHandler.Healthz),
|
||||
"Healthz",
|
||||
"Health check",
|
||||
)).Methods(http.MethodGet).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v2/readyz", newHealthOpenAPIHandler(
|
||||
provider.authZ.OpenAccess(provider.factoryHandler.Readyz),
|
||||
"Readyz",
|
||||
"Readiness check",
|
||||
)).Methods(http.MethodGet).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v2/livez", pkghandler.New(provider.authZ.OpenAccess(provider.factoryHandler.Livez),
|
||||
pkghandler.OpenAPIDef{
|
||||
ID: "Livez",
|
||||
Tags: []string{"health"},
|
||||
Summary: "Liveness check",
|
||||
Response: new(factory.Response),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
},
|
||||
)).Methods(http.MethodGet).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -11,7 +11,7 @@ import (
|
||||
)
|
||||
|
||||
type AuthZ interface {
|
||||
factory.ServiceWithHealthy
|
||||
factory.Service
|
||||
|
||||
// CheckWithTupleCreation takes upon the responsibility for generating the tuples alongside everything Check does.
|
||||
CheckWithTupleCreation(context.Context, authtypes.Claims, valuer.UUID, authtypes.Relation, authtypes.Typeable, []authtypes.Selector, []authtypes.Selector) error
|
||||
|
||||
@@ -43,10 +43,6 @@ func (provider *provider) Start(ctx context.Context) error {
|
||||
return provider.server.Start(ctx)
|
||||
}
|
||||
|
||||
func (provider *provider) Healthy() <-chan struct{} {
|
||||
return provider.server.Healthy()
|
||||
}
|
||||
|
||||
func (provider *provider) Stop(ctx context.Context) error {
|
||||
return provider.server.Stop(ctx)
|
||||
}
|
||||
|
||||
@@ -31,7 +31,6 @@ type Server struct {
|
||||
modelID string
|
||||
mtx sync.RWMutex
|
||||
stopChan chan struct{}
|
||||
healthyC chan struct{}
|
||||
}
|
||||
|
||||
func NewOpenfgaServer(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (*Server, error) {
|
||||
@@ -62,7 +61,6 @@ func NewOpenfgaServer(ctx context.Context, settings factory.ProviderSettings, co
|
||||
openfgaSchema: openfgaSchema,
|
||||
mtx: sync.RWMutex{},
|
||||
stopChan: make(chan struct{}),
|
||||
healthyC: make(chan struct{}),
|
||||
}, nil
|
||||
}
|
||||
|
||||
@@ -82,16 +80,10 @@ func (server *Server) Start(ctx context.Context) error {
|
||||
server.storeID = storeID
|
||||
server.mtx.Unlock()
|
||||
|
||||
close(server.healthyC)
|
||||
|
||||
<-server.stopChan
|
||||
return nil
|
||||
}
|
||||
|
||||
func (server *Server) Healthy() <-chan struct{} {
|
||||
return server.healthyC
|
||||
}
|
||||
|
||||
func (server *Server) Stop(ctx context.Context) error {
|
||||
server.openfgaServer.Close()
|
||||
close(server.stopChan)
|
||||
|
||||
@@ -1,67 +0,0 @@
|
||||
package factory
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
)
|
||||
|
||||
// Handler provides HTTP handler functions for service health checks.
|
||||
type Handler interface {
|
||||
// Readyz reports whether services are ready.
|
||||
Readyz(http.ResponseWriter, *http.Request)
|
||||
|
||||
// Livez reports whether services are alive.
|
||||
Livez(http.ResponseWriter, *http.Request)
|
||||
|
||||
// Healthz reports overall service health.
|
||||
Healthz(http.ResponseWriter, *http.Request)
|
||||
}
|
||||
|
||||
type handler struct {
|
||||
registry *Registry
|
||||
}
|
||||
|
||||
func NewHandler(registry *Registry) Handler {
|
||||
return &handler{
|
||||
registry: registry,
|
||||
}
|
||||
}
|
||||
|
||||
type Response struct {
|
||||
Healthy bool `json:"healthy"`
|
||||
Services map[State][]Name `json:"services"`
|
||||
}
|
||||
|
||||
func (handler *handler) Healthz(rw http.ResponseWriter, req *http.Request) {
|
||||
byState := handler.registry.ServicesByState()
|
||||
healthy := handler.registry.IsHealthy()
|
||||
|
||||
statusCode := http.StatusOK
|
||||
if !healthy {
|
||||
statusCode = http.StatusServiceUnavailable
|
||||
}
|
||||
|
||||
render.Success(rw, statusCode, Response{
|
||||
Healthy: healthy,
|
||||
Services: byState,
|
||||
})
|
||||
}
|
||||
|
||||
func (handler *handler) Readyz(rw http.ResponseWriter, req *http.Request) {
|
||||
healthy := handler.registry.IsHealthy()
|
||||
|
||||
statusCode := http.StatusOK
|
||||
if !healthy {
|
||||
statusCode = http.StatusServiceUnavailable
|
||||
}
|
||||
|
||||
render.Success(rw, statusCode, Response{
|
||||
Healthy: healthy,
|
||||
Services: handler.registry.ServicesByState(),
|
||||
})
|
||||
}
|
||||
|
||||
func (handler *handler) Livez(rw http.ResponseWriter, req *http.Request) {
|
||||
render.Success(rw, http.StatusOK, nil)
|
||||
}
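
To make the wire format of these health handlers concrete, here is a minimal hedged sketch of the payload built from the Response struct above, relying on the State and Name text marshalling shown elsewhere in this diff. The service names are invented for illustration, and render.Success additionally wraps this payload in its usual success envelope.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/SigNoz/signoz/pkg/factory"
)

func main() {
	// Response mirrors the struct above: an overall `healthy` flag plus services grouped by state.
	resp := factory.Response{
		Healthy: false,
		Services: map[factory.State][]factory.Name{
			factory.StateRunning:  {factory.MustNewName("sqlstore")},
			factory.StateStarting: {factory.MustNewName("apiserver")},
		},
	}

	// State.MarshalText and Name.MarshalText make both the map keys and values plain strings.
	out, err := json.MarshalIndent(resp, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
	// {
	//   "healthy": false,
	//   "services": {
	//     "running": ["sqlstore"],
	//     "starting": ["apiserver"]
	//   }
	// }
}
```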
|
||||
@@ -5,11 +5,9 @@ import (
|
||||
"regexp"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/swaggest/jsonschema-go"
|
||||
)
|
||||
|
||||
var _ slog.LogValuer = (Name{})
|
||||
var _ jsonschema.Exposer = (Name{})
|
||||
|
||||
var (
|
||||
// nameRegex is a regex that matches a valid name.
|
||||
@@ -29,21 +27,6 @@ func (n Name) String() string {
|
||||
return n.name
|
||||
}
|
||||
|
||||
// MarshalText implements encoding.TextMarshaler for JSON serialization.
|
||||
func (n Name) MarshalText() ([]byte, error) {
|
||||
return []byte(n.name), nil
|
||||
}
|
||||
|
||||
// MarshalJSON implements json.Marshaler so Name serializes as a JSON string.
|
||||
func (n Name) MarshalJSON() ([]byte, error) {
|
||||
return []byte(`"` + n.name + `"`), nil
|
||||
}
|
||||
|
||||
// JSONSchema implements jsonschema.Exposer so OpenAPI reflects Name as a string.
|
||||
func (n Name) JSONSchema() (jsonschema.Schema, error) {
|
||||
return *new(jsonschema.Schema).WithType(jsonschema.String.Type()), nil
|
||||
}
|
||||
|
||||
// NewName creates a new name.
|
||||
func NewName(name string) (Name, error) {
|
||||
if !nameRegex.MatchString(name) {
|
||||
|
||||
@@ -8,26 +8,21 @@ import (
|
||||
"syscall"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"gonum.org/v1/gonum/graph/simple"
|
||||
"gonum.org/v1/gonum/graph/topo"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrCodeInvalidRegistry = errors.MustNewCode("invalid_registry")
|
||||
ErrCodeDependencyFailed = errors.MustNewCode("dependency_failed")
|
||||
ErrCodeServiceFailed = errors.MustNewCode("service_failed")
|
||||
ErrCodeInvalidRegistry = errors.MustNewCode("invalid_registry")
|
||||
)
|
||||
|
||||
type Registry struct {
|
||||
services []*serviceWithState
|
||||
servicesByName map[Name]*serviceWithState
|
||||
logger *slog.Logger
|
||||
startC chan error
|
||||
stopC chan error
|
||||
services NamedMap[NamedService]
|
||||
logger *slog.Logger
|
||||
startCh chan error
|
||||
stopCh chan error
|
||||
}
|
||||
|
||||
// New creates a new registry of services. It needs at least one service in the input.
|
||||
func NewRegistry(ctx context.Context, logger *slog.Logger, services ...NamedService) (*Registry, error) {
|
||||
func NewRegistry(logger *slog.Logger, services ...NamedService) (*Registry, error) {
|
||||
if logger == nil {
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidRegistry, "cannot build registry, logger is required")
|
||||
}
|
||||
@@ -36,131 +31,59 @@ func NewRegistry(ctx context.Context, logger *slog.Logger, services ...NamedServ
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidRegistry, "cannot build registry, at least one service is required")
|
||||
}
|
||||
|
||||
servicesWithState := make([]*serviceWithState, len(services))
|
||||
servicesByName := make(map[Name]*serviceWithState, len(services))
|
||||
for i, s := range services {
|
||||
if _, ok := servicesByName[s.Name()]; ok {
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidRegistry, "cannot build registry, duplicate service name %q", s.Name())
|
||||
}
|
||||
ss := newServiceWithState(s)
|
||||
servicesWithState[i] = ss
|
||||
servicesByName[s.Name()] = ss
|
||||
}
|
||||
|
||||
registryLogger := logger.With(slog.String("pkg", "github.com/SigNoz/signoz/pkg/factory"))
|
||||
|
||||
for _, ss := range servicesWithState {
|
||||
for _, dep := range ss.service.DependsOn() {
|
||||
if dep == ss.service.Name() {
|
||||
registryLogger.ErrorContext(ctx, "ignoring self-dependency", slog.Any("service", ss.service.Name()))
|
||||
continue
|
||||
}
|
||||
|
||||
if _, ok := servicesByName[dep]; !ok {
|
||||
registryLogger.ErrorContext(ctx, "ignoring unknown dependency", slog.Any("service", ss.service.Name()), slog.Any("dependency", dep))
|
||||
continue
|
||||
}
|
||||
|
||||
ss.dependsOn = append(ss.dependsOn, dep)
|
||||
}
|
||||
}
|
||||
|
||||
if err := detectCyclicDeps(servicesWithState); err != nil {
|
||||
m, err := NewNamedMap(services...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &Registry{
|
||||
logger: registryLogger,
|
||||
services: servicesWithState,
|
||||
servicesByName: servicesByName,
|
||||
startC: make(chan error, 1),
|
||||
stopC: make(chan error, len(services)),
|
||||
logger: logger.With(slog.String("pkg", "go.signoz.io/pkg/factory")),
|
||||
services: m,
|
||||
startCh: make(chan error, 1),
|
||||
stopCh: make(chan error, len(services)),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (registry *Registry) Start(ctx context.Context) {
|
||||
for _, ss := range registry.services {
|
||||
go func(ss *serviceWithState) {
|
||||
// Wait for all dependencies to be healthy before starting.
|
||||
for _, dep := range ss.dependsOn {
|
||||
depState := registry.servicesByName[dep]
|
||||
registry.logger.InfoContext(ctx, "service waiting for dependency", slog.Any("service", ss.service.Name()), slog.Any("dependency", dep))
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
ss.mu.Lock()
|
||||
ss.state = StateFailed
|
||||
ss.startErr = ctx.Err()
|
||||
ss.mu.Unlock()
|
||||
close(ss.startReturnedC)
|
||||
registry.startC <- ctx.Err()
|
||||
return
|
||||
case <-depState.healthyC:
|
||||
// Dependency is healthy, continue.
|
||||
case <-depState.startReturnedC:
|
||||
// Dependency failed before becoming healthy.
|
||||
err := errors.Newf(errors.TypeInternal, ErrCodeDependencyFailed, "dependency %q of service %q failed", dep, ss.service.Name())
|
||||
ss.mu.Lock()
|
||||
ss.state = StateFailed
|
||||
ss.startErr = err
|
||||
ss.mu.Unlock()
|
||||
close(ss.startReturnedC)
|
||||
registry.startC <- err
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
registry.logger.InfoContext(ctx, "starting service", slog.Any("service", ss.service.Name()))
|
||||
|
||||
go func() {
|
||||
select {
|
||||
case <-ss.service.Healthy():
|
||||
ss.setState(StateRunning)
|
||||
case <-ss.startReturnedC:
|
||||
}
|
||||
}()
|
||||
|
||||
err := ss.service.Start(ctx)
|
||||
if err != nil {
|
||||
ss.mu.Lock()
|
||||
ss.state = StateFailed
|
||||
ss.startErr = err
|
||||
ss.mu.Unlock()
|
||||
}
|
||||
close(ss.startReturnedC)
|
||||
registry.startC <- err
|
||||
}(ss)
|
||||
func (r *Registry) Start(ctx context.Context) {
|
||||
for _, s := range r.services.GetInOrder() {
|
||||
go func(s NamedService) {
|
||||
r.logger.InfoContext(ctx, "starting service", slog.Any("service", s.Name()))
|
||||
err := s.Start(ctx)
|
||||
r.startCh <- err
|
||||
}(s)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func (registry *Registry) Wait(ctx context.Context) error {
|
||||
func (r *Registry) Wait(ctx context.Context) error {
|
||||
interrupt := make(chan os.Signal, 1)
|
||||
signal.Notify(interrupt, syscall.SIGINT, syscall.SIGTERM)
|
||||
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
registry.logger.InfoContext(ctx, "caught context error, exiting", errors.Attr(ctx.Err()))
|
||||
r.logger.InfoContext(ctx, "caught context error, exiting", errors.Attr(ctx.Err()))
|
||||
case s := <-interrupt:
|
||||
registry.logger.InfoContext(ctx, "caught interrupt signal, exiting", slog.Any("signal", s))
|
||||
case err := <-registry.startC:
|
||||
registry.logger.ErrorContext(ctx, "caught service error, exiting", errors.Attr(err))
|
||||
r.logger.InfoContext(ctx, "caught interrupt signal, exiting", slog.Any("signal", s))
|
||||
case err := <-r.startCh:
|
||||
r.logger.ErrorContext(ctx, "caught service error, exiting", errors.Attr(err))
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (registry *Registry) Stop(ctx context.Context) error {
|
||||
for _, ss := range registry.services {
|
||||
go func(ss *serviceWithState) {
|
||||
registry.logger.InfoContext(ctx, "stopping service", slog.Any("service", ss.service.Name()))
|
||||
err := ss.service.Stop(ctx)
|
||||
registry.stopC <- err
|
||||
}(ss)
|
||||
func (r *Registry) Stop(ctx context.Context) error {
|
||||
for _, s := range r.services.GetInOrder() {
|
||||
go func(s NamedService) {
|
||||
r.logger.InfoContext(ctx, "stopping service", slog.Any("service", s.Name()))
|
||||
err := s.Stop(ctx)
|
||||
r.stopCh <- err
|
||||
}(s)
|
||||
}
|
||||
|
||||
errs := make([]error, len(registry.services))
|
||||
for i := 0; i < len(registry.services); i++ {
|
||||
err := <-registry.stopC
|
||||
errs := make([]error, len(r.services.GetInOrder()))
|
||||
for i := 0; i < len(r.services.GetInOrder()); i++ {
|
||||
err := <-r.stopCh
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
@@ -168,83 +91,3 @@ func (registry *Registry) Stop(ctx context.Context) error {
|
||||
|
||||
return errors.Join(errs...)
|
||||
}
|
||||
|
||||
// AwaitHealthy blocks until all services reach the RUNNING state or any service fails.
|
||||
func (registry *Registry) AwaitHealthy(ctx context.Context) error {
|
||||
for _, ss := range registry.services {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return ctx.Err()
|
||||
case <-ss.healthyC:
|
||||
case <-ss.startReturnedC:
|
||||
ss.mu.RLock()
|
||||
err := ss.startErr
|
||||
ss.mu.RUnlock()
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, errors.TypeInternal, ErrCodeServiceFailed, "service %q failed before becoming healthy", ss.service.Name())
|
||||
}
|
||||
return errors.Newf(errors.TypeInternal, ErrCodeServiceFailed, "service %q terminated before becoming healthy", ss.service.Name())
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ServicesByState returns a snapshot of the current state of all services.
|
||||
func (registry *Registry) ServicesByState() map[State][]Name {
|
||||
result := make(map[State][]Name)
|
||||
for _, ss := range registry.services {
|
||||
state := ss.getState()
|
||||
result[state] = append(result[state], ss.service.Name())
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// IsHealthy returns true if all services are in the RUNNING state.
|
||||
func (registry *Registry) IsHealthy() bool {
|
||||
for _, ss := range registry.services {
|
||||
if ss.getState() != StateRunning {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// detectCyclicDeps returns an error listing all dependency cycles found using
|
||||
// gonum's Tarjan SCC algorithm.
|
||||
func detectCyclicDeps(services []*serviceWithState) error {
|
||||
nameToID := make(map[Name]int64, len(services))
|
||||
idToName := make(map[int64]Name, len(services))
|
||||
for i, ss := range services {
|
||||
id := int64(i)
|
||||
nameToID[ss.service.Name()] = id
|
||||
idToName[id] = ss.service.Name()
|
||||
}
|
||||
|
||||
g := simple.NewDirectedGraph()
|
||||
for _, ss := range services {
|
||||
g.AddNode(simple.Node(nameToID[ss.service.Name()]))
|
||||
}
|
||||
for _, ss := range services {
|
||||
fromID := nameToID[ss.service.Name()]
|
||||
for _, dep := range ss.dependsOn {
|
||||
g.SetEdge(simple.Edge{F: simple.Node(fromID), T: simple.Node(nameToID[dep])})
|
||||
}
|
||||
}
|
||||
|
||||
if _, err := topo.Sort(g); err == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
var cycles [][]Name
|
||||
for _, scc := range topo.TarjanSCC(g) {
|
||||
if len(scc) > 1 {
|
||||
cycle := make([]Name, len(scc))
|
||||
for i, n := range scc {
|
||||
cycle[i] = idToName[n.ID()]
|
||||
}
|
||||
cycles = append(cycles, cycle)
|
||||
}
|
||||
}
|
||||
|
||||
return errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidRegistry, "dependency cycles detected: %v", cycles)
|
||||
}
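
Both registry variants in this hunk share the same lifecycle of construct, Start, Wait, and Stop. The sketch below walks through that flow under the assumption of the ctx-less NewRegistry signature shown here; the HTTP service wrapper and the service name are illustrative only.

```go
package main

import (
	"context"
	"log/slog"
	"net/http"
	"os"

	"github.com/SigNoz/signoz/pkg/factory"
)

// httpService adapts an *http.Server to the factory.Service interface from this diff:
// Start blocks until the server stops, Stop shuts it down gracefully.
type httpService struct{ srv *http.Server }

func (s *httpService) Start(ctx context.Context) error {
	err := s.srv.ListenAndServe()
	if err == http.ErrServerClosed {
		return nil
	}
	return err
}

func (s *httpService) Stop(ctx context.Context) error { return s.srv.Shutdown(ctx) }

func main() {
	logger := slog.New(slog.NewTextHandler(os.Stderr, nil))
	web := &httpService{srv: &http.Server{Addr: ":8080"}}

	registry, err := factory.NewRegistry(
		logger,
		factory.NewNamedService(factory.MustNewName("web"), web),
	)
	if err != nil {
		logger.Error("failed to build registry", "error", err)
		os.Exit(1)
	}

	ctx := context.Background()
	registry.Start(ctx) // each service starts in its own goroutine
	if err := registry.Wait(ctx); err != nil {
		// Wait blocks on an interrupt signal, context cancellation, or a service start error.
		logger.Error("registry exited with error", "error", err)
	}
	_ = registry.Stop(ctx) // stop all services and join their errors
}
```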
|
||||
|
||||
@@ -5,10 +5,7 @@ import (
|
||||
"log/slog"
|
||||
"sync"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
@@ -31,46 +28,11 @@ func (s *tservice) Stop(_ context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
type healthyTestService struct {
|
||||
tservice
|
||||
healthyC chan struct{}
|
||||
}
|
||||
|
||||
func newHealthyTestService(t *testing.T) *healthyTestService {
|
||||
t.Helper()
|
||||
return &healthyTestService{
|
||||
tservice: tservice{c: make(chan struct{})},
|
||||
healthyC: make(chan struct{}),
|
||||
}
|
||||
}
|
||||
|
||||
func (s *healthyTestService) Healthy() <-chan struct{} {
|
||||
return s.healthyC
|
||||
}
|
||||
|
||||
// failingHealthyService implements Healthy but fails before signaling healthy.
|
||||
type failingHealthyService struct {
|
||||
healthyC chan struct{}
|
||||
err error
|
||||
}
|
||||
|
||||
func (s *failingHealthyService) Start(_ context.Context) error {
|
||||
return s.err
|
||||
}
|
||||
|
||||
func (s *failingHealthyService) Stop(_ context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *failingHealthyService) Healthy() <-chan struct{} {
|
||||
return s.healthyC
|
||||
}
|
||||
|
||||
func TestRegistryWith2Services(t *testing.T) {
|
||||
s1 := newTestService(t)
|
||||
s2 := newTestService(t)
|
||||
|
||||
registry, err := NewRegistry(context.Background(), slog.New(slog.DiscardHandler), NewNamedService(MustNewName("s1"), s1), NewNamedService(MustNewName("s2"), s2))
|
||||
registry, err := NewRegistry(slog.New(slog.DiscardHandler), NewNamedService(MustNewName("s1"), s1), NewNamedService(MustNewName("s2"), s2))
|
||||
require.NoError(t, err)
|
||||
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
@@ -79,8 +41,8 @@ func TestRegistryWith2Services(t *testing.T) {
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
registry.Start(ctx)
|
||||
assert.NoError(t, registry.Wait(ctx))
|
||||
assert.NoError(t, registry.Stop(ctx))
|
||||
require.NoError(t, registry.Wait(ctx))
|
||||
require.NoError(t, registry.Stop(ctx))
|
||||
}()
|
||||
cancel()
|
||||
|
||||
@@ -91,7 +53,7 @@ func TestRegistryWith2ServicesWithoutWait(t *testing.T) {
|
||||
s1 := newTestService(t)
|
||||
s2 := newTestService(t)
|
||||
|
||||
registry, err := NewRegistry(context.Background(), slog.New(slog.DiscardHandler), NewNamedService(MustNewName("s1"), s1), NewNamedService(MustNewName("s2"), s2))
|
||||
registry, err := NewRegistry(slog.New(slog.DiscardHandler), NewNamedService(MustNewName("s1"), s1), NewNamedService(MustNewName("s2"), s2))
|
||||
require.NoError(t, err)
|
||||
|
||||
ctx := context.Background()
|
||||
@@ -100,245 +62,8 @@ func TestRegistryWith2ServicesWithoutWait(t *testing.T) {
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
registry.Start(ctx)
|
||||
assert.NoError(t, registry.Stop(ctx))
|
||||
require.NoError(t, registry.Stop(ctx))
|
||||
}()
|
||||
|
||||
wg.Wait()
|
||||
}
|
||||
|
||||
func TestServiceStateTransitions(t *testing.T) {
|
||||
s1 := newTestService(t)
|
||||
|
||||
registry, err := NewRegistry(context.Background(), slog.New(slog.DiscardHandler), NewNamedService(MustNewName("s1"), s1))
|
||||
require.NoError(t, err)
|
||||
|
||||
ctx := context.Background()
|
||||
registry.Start(ctx)
|
||||
|
||||
require.NoError(t, registry.AwaitHealthy(ctx))
|
||||
|
||||
byState := registry.ServicesByState()
|
||||
assert.Len(t, byState[StateRunning], 1)
|
||||
assert.True(t, registry.IsHealthy())
|
||||
|
||||
assert.NoError(t, registry.Stop(ctx))
|
||||
}
|
||||
|
||||
func TestServiceStateWithHealthy(t *testing.T) {
|
||||
s1 := newHealthyTestService(t)
|
||||
|
||||
registry, err := NewRegistry(context.Background(), slog.New(slog.DiscardHandler), NewNamedService(MustNewName("s1"), s1))
|
||||
require.NoError(t, err)
|
||||
|
||||
ctx := context.Background()
|
||||
registry.Start(ctx)
|
||||
|
||||
// Poll until STARTING state is observed
|
||||
require.Eventually(t, func() bool {
|
||||
byState := registry.ServicesByState()
|
||||
return len(byState[StateStarting]) == 1
|
||||
}, time.Second, time.Millisecond)
|
||||
assert.False(t, registry.IsHealthy())
|
||||
|
||||
// Signal healthy
|
||||
close(s1.healthyC)
|
||||
|
||||
require.NoError(t, registry.AwaitHealthy(ctx))
|
||||
assert.True(t, registry.IsHealthy())
|
||||
|
||||
byState := registry.ServicesByState()
|
||||
assert.Len(t, byState[StateRunning], 1)
|
||||
|
||||
assert.NoError(t, registry.Stop(ctx))
|
||||
}
|
||||
|
||||
func TestAwaitHealthy(t *testing.T) {
|
||||
s1 := newTestService(t)
|
||||
s2 := newTestService(t)
|
||||
|
||||
registry, err := NewRegistry(context.Background(), slog.New(slog.DiscardHandler), NewNamedService(MustNewName("s1"), s1), NewNamedService(MustNewName("s2"), s2))
|
||||
require.NoError(t, err)
|
||||
|
||||
ctx := context.Background()
|
||||
registry.Start(ctx)
|
||||
|
||||
assert.NoError(t, registry.AwaitHealthy(ctx))
|
||||
assert.True(t, registry.IsHealthy())
|
||||
|
||||
assert.NoError(t, registry.Stop(ctx))
|
||||
}
|
||||
|
||||
func TestAwaitHealthyWithFailure(t *testing.T) {
|
||||
s1 := &failingHealthyService{
|
||||
healthyC: make(chan struct{}),
|
||||
		err:      errors.Newf(errors.TypeInternal, errors.CodeInternal, "startup failed"),
|
||||
}
|
||||
|
||||
registry, err := NewRegistry(context.Background(), slog.New(slog.DiscardHandler), NewNamedService(MustNewName("s1"), s1))
|
||||
require.NoError(t, err)
|
||||
|
||||
ctx := context.Background()
|
||||
registry.Start(ctx)
|
||||
|
||||
err = registry.AwaitHealthy(ctx)
|
||||
assert.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "startup failed")
|
||||
}
|
||||
|
||||
func TestServicesByState(t *testing.T) {
|
||||
s1 := newTestService(t)
|
||||
s2 := newHealthyTestService(t)
|
||||
|
||||
registry, err := NewRegistry(context.Background(), slog.New(slog.DiscardHandler), NewNamedService(MustNewName("s1"), s1), NewNamedService(MustNewName("s2"), s2))
|
||||
require.NoError(t, err)
|
||||
|
||||
ctx := context.Background()
|
||||
registry.Start(ctx)
|
||||
|
||||
// Wait for s1 to be running (no Healthy interface) and s2 to be starting
|
||||
require.Eventually(t, func() bool {
|
||||
byState := registry.ServicesByState()
|
||||
return len(byState[StateRunning]) == 1 && len(byState[StateStarting]) == 1
|
||||
}, time.Second, time.Millisecond)
|
||||
|
||||
// Make s2 healthy
|
||||
close(s2.healthyC)
|
||||
|
||||
require.NoError(t, registry.AwaitHealthy(ctx))
|
||||
byState := registry.ServicesByState()
|
||||
assert.Len(t, byState[StateRunning], 2)
|
||||
|
||||
assert.NoError(t, registry.Stop(ctx))
|
||||
}
|
||||
|
||||
func TestDependsOnStartsAfterDependency(t *testing.T) {
|
||||
s1 := newHealthyTestService(t)
|
||||
s2 := newTestService(t)
|
||||
|
||||
// s2 depends on s1
|
||||
registry, err := NewRegistry(
|
||||
context.Background(),
|
||||
slog.New(slog.DiscardHandler),
|
||||
NewNamedService(MustNewName("s1"), s1),
|
||||
NewNamedService(MustNewName("s2"), s2, MustNewName("s1")),
|
||||
)
|
||||
require.NoError(t, err)
|
||||
|
||||
ctx := context.Background()
|
||||
registry.Start(ctx)
|
||||
|
||||
// s2 should still be STARTING because s1 hasn't become healthy yet
|
||||
require.Eventually(t, func() bool {
|
||||
byState := registry.ServicesByState()
|
||||
return len(byState[StateStarting]) == 2
|
||||
}, time.Second, time.Millisecond)
|
||||
|
||||
// Make s1 healthy — s2 should then start and become RUNNING
|
||||
close(s1.healthyC)
|
||||
|
||||
assert.NoError(t, registry.AwaitHealthy(ctx))
|
||||
assert.True(t, registry.IsHealthy())
|
||||
|
||||
assert.NoError(t, registry.Stop(ctx))
|
||||
}
|
||||
|
||||
func TestDependsOnFailsWhenDependencyFails(t *testing.T) {
|
||||
s1 := &failingHealthyService{
|
||||
healthyC: make(chan struct{}),
|
||||
		err:      errors.Newf(errors.TypeInternal, errors.CodeInternal, "s1 crashed"),
|
||||
}
|
||||
s2 := newTestService(t)
|
||||
|
||||
// s2 depends on s1
|
||||
registry, err := NewRegistry(
|
||||
context.Background(),
|
||||
slog.New(slog.DiscardHandler),
|
||||
NewNamedService(MustNewName("s1"), s1),
|
||||
NewNamedService(MustNewName("s2"), s2, MustNewName("s1")),
|
||||
)
|
||||
require.NoError(t, err)
|
||||
|
||||
ctx := context.Background()
|
||||
registry.Start(ctx)
|
||||
|
||||
// Both should eventually fail
|
||||
assert.Eventually(t, func() bool {
|
||||
byState := registry.ServicesByState()
|
||||
return len(byState[StateFailed]) == 2
|
||||
}, time.Second, time.Millisecond)
|
||||
}
|
||||
|
||||
func TestDependsOnUnknownServiceIsIgnored(t *testing.T) {
|
||||
s1 := newTestService(t)
|
||||
|
||||
// Unknown dependency is logged and ignored, not an error.
|
||||
registry, err := NewRegistry(
|
||||
context.Background(),
|
||||
slog.New(slog.DiscardHandler),
|
||||
NewNamedService(MustNewName("s1"), s1, MustNewName("unknown")),
|
||||
)
|
||||
require.NoError(t, err)
|
||||
|
||||
ctx := context.Background()
|
||||
registry.Start(ctx)
|
||||
|
||||
assert.NoError(t, registry.AwaitHealthy(ctx))
|
||||
assert.True(t, registry.IsHealthy())
|
||||
|
||||
assert.NoError(t, registry.Stop(ctx))
|
||||
}
|
||||
|
||||
func TestServiceStateFailed(t *testing.T) {
|
||||
s1 := &failingHealthyService{
|
||||
healthyC: make(chan struct{}),
|
||||
		err:      errors.Newf(errors.TypeInternal, errors.CodeInternal, "fatal error"),
|
||||
}
|
||||
|
||||
registry, err := NewRegistry(context.Background(), slog.New(slog.DiscardHandler), NewNamedService(MustNewName("s1"), s1))
|
||||
require.NoError(t, err)
|
||||
|
||||
ctx := context.Background()
|
||||
registry.Start(ctx)
|
||||
|
||||
// Wait for the service to fail
|
||||
assert.Eventually(t, func() bool {
|
||||
byState := registry.ServicesByState()
|
||||
return len(byState[StateFailed]) == 1
|
||||
}, time.Second, time.Millisecond)
|
||||
assert.False(t, registry.IsHealthy())
|
||||
}
|
||||
|
||||
func TestDependsOnSelfDependencyIsIgnored(t *testing.T) {
|
||||
s1 := newTestService(t)
|
||||
|
||||
// Self-dependency is logged and ignored.
|
||||
registry, err := NewRegistry(
|
||||
context.Background(),
|
||||
slog.New(slog.DiscardHandler),
|
||||
NewNamedService(MustNewName("s1"), s1, MustNewName("s1")),
|
||||
)
|
||||
require.NoError(t, err)
|
||||
|
||||
ctx := context.Background()
|
||||
registry.Start(ctx)
|
||||
|
||||
assert.NoError(t, registry.AwaitHealthy(ctx))
|
||||
assert.True(t, registry.IsHealthy())
|
||||
|
||||
assert.NoError(t, registry.Stop(ctx))
|
||||
}
|
||||
|
||||
func TestDependsOnCycleReturnsError(t *testing.T) {
|
||||
s1 := newTestService(t)
|
||||
s2 := newTestService(t)
|
||||
|
||||
// A -> B and B -> A is a cycle.
|
||||
_, err := NewRegistry(
|
||||
context.Background(),
|
||||
slog.New(slog.DiscardHandler),
|
||||
NewNamedService(MustNewName("s1"), s1, MustNewName("s2")),
|
||||
NewNamedService(MustNewName("s2"), s2, MustNewName("s1")),
|
||||
)
|
||||
assert.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "dependency cycles detected")
|
||||
}
|
||||
|
||||
@@ -2,81 +2,30 @@ package factory

import "context"

// Service is the core lifecycle interface for long-running services.
type Service interface {
	// Starts a service. It should block and should not return until the service is stopped or it fails.
	Start(context.Context) error

	// Stops a service.
	Stop(context.Context) error
}

// Healthy is an optional interface that services can implement to signal
// when they have completed startup and are ready to serve.
// Services that do not implement this interface are considered healthy
// immediately after Start() is called.
type Healthy interface {
	// Healthy returns a channel that is closed when the service is healthy.
	Healthy() <-chan struct{}
}

// ServiceWithHealthy is a Service that explicitly signals when it is healthy.
type ServiceWithHealthy interface {
	Service
	Healthy
}

// NamedService is a Service with a Name and optional dependencies.
type NamedService interface {
	Named
	ServiceWithHealthy
	// DependsOn returns the names of services that must be healthy before this service starts.
	DependsOn() []Name
	Service
}

// closedC is a pre-closed channel returned for services that don't implement Healthy.
var closedC = func() chan struct{} {
	c := make(chan struct{})
	close(c)
	return c
}()

type namedService struct {
	name      Name
	dependsOn []Name
	service   Service
}

// NewNamedService wraps a Service with a Name and optional dependency names.
func NewNamedService(name Name, service Service, dependsOn ...Name) NamedService {
	return &namedService{
		name:      name,
		dependsOn: dependsOn,
		service:   service,
	}
	name Name
	Service
}

func (s *namedService) Name() Name {
	return s.name
}

func (s *namedService) DependsOn() []Name {
	return s.dependsOn
}

func (s *namedService) Start(ctx context.Context) error {
	return s.service.Start(ctx)
}

func (s *namedService) Stop(ctx context.Context) error {
	return s.service.Stop(ctx)
}

// Healthy delegates to the underlying service if it implements Healthy,
// otherwise returns an already-closed channel (immediately healthy).
func (s *namedService) Healthy() <-chan struct{} {
	if h, ok := s.service.(Healthy); ok {
		return h.Healthy()
func NewNamedService(name Name, service Service) NamedService {
	return &namedService{
		name:    name,
		Service: service,
	}
	return closedC
}
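For orientation, a minimal sketch, not part of this change, of a Service implementation that satisfies the contract above; noopService is a hypothetical name, and the commented call shows the two-argument NewNamedService form from this diff.

```go
package example

import "context"

// noopService is a hypothetical Service used only to illustrate the contract:
// Start blocks until Stop is called, as the Service interface above requires.
type noopService struct{ stopC chan struct{} }

func (s *noopService) Start(ctx context.Context) error {
	<-s.stopC
	return nil
}

func (s *noopService) Stop(ctx context.Context) error {
	close(s.stopC)
	return nil
}

// Assumed usage with the factory package from this diff:
//
//	svc := factory.NewNamedService(factory.MustNewName("noop"), &noopService{stopC: make(chan struct{})})
//	_ = svc.Name()
```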
@@ -1,75 +0,0 @@
package factory

import "sync"

// State represents the lifecycle state of a service.
type State struct {
	s string
}

func (s State) String() string {
	return s.s
}

// MarshalText implements encoding.TextMarshaler so State can be used as a JSON map key.
func (s State) MarshalText() ([]byte, error) {
	return []byte(s.s), nil
}

var (
	StateStarting = State{"starting"}
	StateRunning  = State{"running"}
	StateFailed   = State{"failed"}
)

// serviceWithState wraps a NamedService with thread-safe state tracking.
type serviceWithState struct {
	// service is the underlying named service.
	service NamedService

	// dependsOn is the validated subset of declared dependencies that exist in the registry.
	dependsOn []Name

	// mu protects state and startErr from concurrent access.
	mu sync.RWMutex

	// state is the current lifecycle state of the service.
	state State

	// healthyC is closed when the service transitions to StateRunning.
	healthyC chan struct{}

	// startReturnedC is closed when Start() returns, whether with nil or an error.
	startReturnedC chan struct{}

	// startErr is the error returned by Start(), or nil if it returned successfully.
	startErr error
}

func newServiceWithState(service NamedService) *serviceWithState {
	return &serviceWithState{
		service:        service,
		state:          StateStarting,
		healthyC:       make(chan struct{}),
		startReturnedC: make(chan struct{}),
	}
}

func (ss *serviceWithState) setState(state State) {
	ss.mu.Lock()
	defer ss.mu.Unlock()
	ss.state = state
	if state == StateRunning {
		select {
		case <-ss.healthyC:
		default:
			close(ss.healthyC)
		}
	}
}

func (ss *serviceWithState) getState() State {
	ss.mu.RLock()
	defer ss.mu.RUnlock()
	return ss.state
}
@@ -53,6 +53,7 @@ type Module interface {
}

type Handler interface {
	// GetConnectionArtifact creates a new cloud integration account and returns the connection artifact
	GetConnectionArtifact(http.ResponseWriter, *http.Request)
	ListAccounts(http.ResponseWriter, *http.Request)
	GetAccount(http.ResponseWriter, *http.Request)
58  pkg/modules/cloudintegration/implcloudintegration/handler.go  Normal file
@@ -0,0 +1,58 @@
|
||||
package implcloudintegration
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/modules/cloudintegration"
|
||||
)
|
||||
|
||||
type handler struct{}
|
||||
|
||||
func NewHandler() cloudintegration.Handler {
|
||||
return &handler{}
|
||||
}
|
||||
|
||||
func (h handler) GetConnectionArtifact(writer http.ResponseWriter, request *http.Request) {
|
||||
// TODO implement me
|
||||
panic("implement me")
|
||||
}
|
||||
|
||||
func (h handler) ListAccounts(writer http.ResponseWriter, request *http.Request) {
|
||||
// TODO implement me
|
||||
panic("implement me")
|
||||
}
|
||||
|
||||
func (h handler) GetAccount(writer http.ResponseWriter, request *http.Request) {
|
||||
// TODO implement me
|
||||
panic("implement me")
|
||||
}
|
||||
|
||||
func (h handler) UpdateAccount(writer http.ResponseWriter, request *http.Request) {
|
||||
// TODO implement me
|
||||
panic("implement me")
|
||||
}
|
||||
|
||||
func (h handler) DisconnectAccount(writer http.ResponseWriter, request *http.Request) {
|
||||
// TODO implement me
|
||||
panic("implement me")
|
||||
}
|
||||
|
||||
func (h handler) ListServicesMetadata(writer http.ResponseWriter, request *http.Request) {
|
||||
// TODO implement me
|
||||
panic("implement me")
|
||||
}
|
||||
|
||||
func (h handler) GetService(writer http.ResponseWriter, request *http.Request) {
|
||||
// TODO implement me
|
||||
panic("implement me")
|
||||
}
|
||||
|
||||
func (h handler) UpdateService(writer http.ResponseWriter, request *http.Request) {
|
||||
// TODO implement me
|
||||
panic("implement me")
|
||||
}
|
||||
|
||||
func (h handler) AgentCheckIn(writer http.ResponseWriter, request *http.Request) {
|
||||
// TODO implement me
|
||||
panic("implement me")
|
||||
}
|
||||
@@ -6,19 +6,20 @@ import (
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"slices"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/binding"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrylogs"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/exporttypes"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
@@ -30,31 +31,129 @@ func NewHandler(module rawdataexport.Module) rawdataexport.Handler {
|
||||
return &handler{module: module}
|
||||
}
|
||||
|
||||
// ExportRawData handles data export requests.
|
||||
//
|
||||
// API Documentation:
|
||||
// Endpoint: GET /api/v1/export_raw_data
|
||||
//
|
||||
// Query Parameters:
|
||||
//
|
||||
// - source (optional): Type of data to export ["logs" (default), "metrics", "traces"]
|
||||
// Note: Currently only "logs" is fully supported
|
||||
//
|
||||
// - format (optional): Output format ["csv" (default), "jsonl"]
|
||||
//
|
||||
// - start (required): Start time for query (Unix timestamp in nanoseconds)
|
||||
//
|
||||
// - end (required): End time for query (Unix timestamp in nanoseconds)
|
||||
//
|
||||
// - limit (optional): Maximum number of rows to export
|
||||
// Constraints: Must be positive and cannot exceed MAX_EXPORT_ROW_COUNT_LIMIT
|
||||
//
|
||||
// - filter (optional): Filter expression to apply to the query
|
||||
//
|
||||
// - columns (optional): Specific columns to include in export
|
||||
// Default: all columns are returned
|
||||
// Format: ["context.field:type", "context.field", "field"]
|
||||
//
|
||||
// - order_by (optional): Sorting specification ["column:direction" or "context.field:type:direction"]
|
||||
// Direction: "asc" or "desc"
|
||||
// Default: ["timestamp:desc", "id:desc"]
|
||||
//
|
||||
// Response Headers:
|
||||
// - Content-Type: "text/csv" or "application/x-ndjson"
|
||||
// - Content-Encoding: "gzip" (handled by HTTP middleware)
|
||||
// - Content-Disposition: "attachment; filename=\"data_exported.[format]\""
|
||||
// - Cache-Control: "no-cache"
|
||||
// - Vary: "Accept-Encoding"
|
||||
// - Transfer-Encoding: "chunked"
|
||||
// - Trailers: X-Response-Complete
|
||||
//
|
||||
// Response Format:
|
||||
//
|
||||
// CSV: Headers in first row, data in subsequent rows
|
||||
// JSONL: One JSON object per line
|
||||
//
|
||||
// Example Usage:
|
||||
//
|
||||
// Basic CSV export:
|
||||
// GET /api/v1/export_raw_data?start=1693612800000000000&end=1693699199000000000
|
||||
//
|
||||
// Export with columns and format:
|
||||
// GET /api/v1/export_raw_data?start=1693612800000000000&end=1693699199000000000&format=jsonl
|
||||
// &columns=timestamp&columns=severity&columns=message
|
||||
//
|
||||
// Export with filter and ordering:
|
||||
// GET /api/v1/export_raw_data?start=1693612800000000000&end=1693699199000000000
|
||||
// &filter=severity="error"&order_by=timestamp:desc&limit=1000
|
||||
func (handler *handler) ExportRawData(rw http.ResponseWriter, r *http.Request) {
|
||||
var queryRangeRequest qbtypes.QueryRangeRequest
|
||||
|
||||
var formatParam exporttypes.ExportRawDataFormatQueryParam
|
||||
if err := binding.Query.BindQuery(r.URL.Query(), &formatParam); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
format := formatParam.Format
|
||||
if err := binding.JSON.BindBody(r.Body, &queryRangeRequest); err != nil {
|
||||
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid request body: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
if err := validateSpecForExport(&queryRangeRequest); err != nil {
|
||||
source, err := getExportQuerySource(r.URL.Query())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
if err := validateAndApplyDefaultExportLimits(queryRangeRequest.CompositeQuery.Queries); err != nil {
|
||||
switch source {
|
||||
case "logs":
|
||||
handler.exportLogs(rw, r)
|
||||
case "traces":
|
||||
handler.exportTraces(rw, r)
|
||||
case "metrics":
|
||||
handler.exportMetrics(rw, r)
|
||||
default:
|
||||
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid source: must be logs"))
|
||||
}
|
||||
}
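For reference, the endpoint documented above can be exercised with a plain GET request. A minimal client sketch, not part of this change; the host, time range, and auth header are placeholder assumptions.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
)

func main() {
	// Placeholder values: adjust host, credentials, and the nanosecond time range.
	url := "http://localhost:8080/api/v1/export_raw_data" +
		"?start=1693612800000000000&end=1693699199000000000" +
		"&format=jsonl&limit=1000&order_by=timestamp:desc"

	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("SIGNOZ-API-KEY", os.Getenv("SIGNOZ_API_KEY")) // assumed auth header

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	fmt.Println("status:", resp.Status)
	// The body is streamed CSV or JSONL; copy it to a file or stdout.
	_, _ = io.Copy(os.Stdout, resp.Body)
}
```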
|
||||
|
||||
func (handler *handler) exportMetrics(rw http.ResponseWriter, r *http.Request) {
|
||||
render.Error(rw, errors.Newf(errors.TypeUnsupported, errors.CodeUnsupported, "metrics export is not yet supported"))
|
||||
}
|
||||
|
||||
func (handler *handler) exportTraces(rw http.ResponseWriter, r *http.Request) {
|
||||
render.Error(rw, errors.Newf(errors.TypeUnsupported, errors.CodeUnsupported, "traces export is not yet supported"))
|
||||
}
|
||||
|
||||
func (handler *handler) exportLogs(rw http.ResponseWriter, r *http.Request) {
|
||||
// Set up response headers
|
||||
rw.Header().Set("Cache-Control", "no-cache")
|
||||
rw.Header().Set("Vary", "Accept-Encoding") // Indicate that response varies based on Accept-Encoding
|
||||
rw.Header().Set("Access-Control-Expose-Headers", "Content-Disposition, X-Response-Complete")
|
||||
rw.Header().Set("Trailer", "X-Response-Complete")
|
||||
rw.Header().Set("Transfer-Encoding", "chunked")
|
||||
|
||||
queryParams := r.URL.Query()
|
||||
|
||||
startTime, endTime, err := getExportQueryTimeRange(queryParams)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
queryRangeRequest.UseDefaultOrderBy()
|
||||
limit, err := getExportQueryLimit(queryParams)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
format, err := getExportQueryFormat(queryParams)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
// Set appropriate content type and filename
|
||||
filename := fmt.Sprintf("data_exported_%s.%s", time.Now().Format("2006-01-02_150405"), format)
|
||||
rw.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filename))
|
||||
|
||||
filterExpression := queryParams.Get("filter")
|
||||
|
||||
orderByExpression, err := getExportQueryOrderBy(queryParams)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
columns := getExportQueryColumns(queryParams)
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
@@ -62,98 +161,76 @@ func (handler *handler) ExportRawData(rw http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
orgID := valuer.MustNewUUID(claims.OrgID)
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgID is invalid"))
|
||||
return
|
||||
}
|
||||
|
||||
setExportResponseHeaders(rw, format)
|
||||
queryRangeRequest := qbtypes.QueryRangeRequest{
|
||||
Start: startTime,
|
||||
End: endTime,
|
||||
RequestType: qbtypes.RequestTypeRaw,
|
||||
CompositeQuery: qbtypes.CompositeQuery{
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: nil,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
spec := qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Name: "raw",
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: filterExpression,
|
||||
},
|
||||
Limit: limit,
|
||||
Order: orderByExpression,
|
||||
}
|
||||
|
||||
spec.SelectFields = columns
|
||||
|
||||
queryRangeRequest.CompositeQuery.Queries[0].Spec = spec
|
||||
|
||||
// This will signal Export module to stop sending data
|
||||
doneChan := make(chan any)
|
||||
defer close(doneChan)
|
||||
rowChan, errChan := handler.module.ExportRawData(r.Context(), orgID, &queryRangeRequest, doneChan)
|
||||
|
||||
isComplete, err := handler.executeExport(rowChan, errChan, format, rw)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
rw.Header().Set("X-Response-Complete", strconv.FormatBool(isComplete))
|
||||
}
|
||||
var isComplete bool
|
||||
|
||||
// validateSpecForExport validates query specs
|
||||
func validateSpecForExport(req *qbtypes.QueryRangeRequest) error {
|
||||
|
||||
queries := req.CompositeQuery.Queries
|
||||
|
||||
// If the trace operator query is not present, and there are multiple queries, return an error
|
||||
if req.TraceOperatorQueryIndex() == -1 && len(queries) > 1 {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "multiple queries not allowed without a trace operator query")
|
||||
}
|
||||
|
||||
for idx := range queries {
|
||||
switch spec := queries[idx].Spec.(type) {
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
|
||||
qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation],
|
||||
qbtypes.QueryBuilderTraceOperator:
|
||||
// Supported spec types
|
||||
default:
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported query at index %d type: %T", idx, spec)
|
||||
}
|
||||
}
|
||||
|
||||
opts := append(qbtypes.GetValidationOptions(req.RequestType), qbtypes.WithSkipLimitOffsetValidation())
|
||||
return req.Validate(opts...)
|
||||
}
|
||||
|
||||
func validateAndApplyDefaultExportLimits(queries []qbtypes.QueryEnvelope) error {
|
||||
for idx := range queries {
|
||||
limit := queries[idx].GetLimit()
|
||||
if limit == 0 {
|
||||
limit = DefaultExportRowCountLimit
|
||||
} else if limit < 0 {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "limit must be positive")
|
||||
} else if limit > MaxExportRowCountLimit {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "limit cannot be more than %d", MaxExportRowCountLimit)
|
||||
}
|
||||
queries[idx].SetLimit(limit)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// setExportResponseHeaders sets common HTTP headers for export responses.
|
||||
func setExportResponseHeaders(rw http.ResponseWriter, format string) {
|
||||
rw.Header().Set("Cache-Control", "no-cache")
|
||||
rw.Header().Set("Vary", "Accept-Encoding")
|
||||
rw.Header().Set("Access-Control-Expose-Headers", "Content-Disposition, X-Response-Complete")
|
||||
rw.Header().Set("Trailer", "X-Response-Complete")
|
||||
rw.Header().Set("Transfer-Encoding", "chunked")
|
||||
filename := fmt.Sprintf("data_exported_%s.%s", time.Now().Format("2006-01-02_150405"), format)
|
||||
rw.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filename))
|
||||
}
|
||||
|
||||
// executeExport streams data from rowChan to the response writer in the specified format.
|
||||
func (handler *handler) executeExport(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, format string, rw http.ResponseWriter) (bool, error) {
|
||||
switch format {
|
||||
case "csv", "":
|
||||
rw.Header().Set("Content-Type", "text/csv")
|
||||
csvWriter := csv.NewWriter(rw)
|
||||
isComplete, err := handler.exportRawDataCSV(rowChan, errChan, csvWriter)
|
||||
isComplete, err = handler.exportLogsCSV(rowChan, errChan, csvWriter)
|
||||
if err != nil {
|
||||
return false, err
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
csvWriter.Flush()
|
||||
return isComplete, nil
|
||||
case "jsonl":
|
||||
rw.Header().Set("Content-Type", "application/x-ndjson")
|
||||
return handler.exportRawDataJSONL(rowChan, errChan, rw)
|
||||
isComplete, err = handler.exportLogsJSONL(rowChan, errChan, rw)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
default:
|
||||
return false, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid format: must be csv or jsonl")
|
||||
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid format: must be csv or jsonl"))
|
||||
return
|
||||
}
|
||||
|
||||
rw.Header().Set("X-Response-Complete", strconv.FormatBool(isComplete))
|
||||
}
|
||||
|
||||
// exportRawDataCSV is a generic CSV export function that works with any raw data (logs, traces, etc.)
|
||||
func (handler *handler) exportRawDataCSV(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, csvWriter *csv.Writer) (bool, error) {
|
||||
|
||||
func (handler *handler) exportLogsCSV(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, csvWriter *csv.Writer) (bool, error) {
|
||||
var header []string
|
||||
headerToIndexMapping := make(map[string]int)
|
||||
|
||||
headerToIndexMapping := make(map[string]int, len(header))
|
||||
|
||||
totalBytes := uint64(0)
|
||||
for {
|
||||
@@ -191,8 +268,8 @@ func (handler *handler) exportRawDataCSV(rowChan <-chan *qbtypes.RawRow, errChan
|
||||
}
|
||||
}
|
||||
|
||||
// exportRawDataJSONL is a generic JSONL export function that works with any raw data (logs, traces, etc.)
|
||||
func (handler *handler) exportRawDataJSONL(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, writer io.Writer) (bool, error) {
|
||||
func (handler *handler) exportLogsJSONL(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, writer io.Writer) (bool, error) {
|
||||
|
||||
totalBytes := uint64(0)
|
||||
for {
|
||||
select {
|
||||
@@ -200,11 +277,9 @@ func (handler *handler) exportRawDataJSONL(rowChan <-chan *qbtypes.RawRow, errCh
|
||||
if !ok {
|
||||
return true, nil
|
||||
}
|
||||
jsonBytes, err := json.Marshal(row.Data)
|
||||
if err != nil {
|
||||
return false, errors.NewUnexpectedf(errors.CodeInternal, "error marshaling JSON: %s", err)
|
||||
}
|
||||
totalBytes += uint64(len(jsonBytes)) + 1
|
||||
// Handle JSON format (JSONL - one object per line)
|
||||
jsonBytes, _ := json.Marshal(row.Data)
|
||||
totalBytes += uint64(len(jsonBytes)) + 1 // +1 for newline
|
||||
|
||||
if _, err := writer.Write(jsonBytes); err != nil {
|
||||
return false, errors.NewUnexpectedf(errors.CodeInternal, "error writing JSON: %s", err)
|
||||
@@ -224,33 +299,74 @@ func (handler *handler) exportRawDataJSONL(rowChan <-chan *qbtypes.RawRow, errCh
|
||||
}
|
||||
}
|
||||
|
||||
// priorityColumns defines the columns that should appear first in the CSV output, in order.
|
||||
var priorityColumns = []string{"timestamp", "id"}
|
||||
func getExportQuerySource(queryParams url.Values) (string, error) {
|
||||
switch queryParams.Get("source") {
|
||||
case "logs", "":
|
||||
return "logs", nil
|
||||
case "metrics":
|
||||
return "metrics", errors.NewInvalidInputf(errors.CodeInvalidInput, "metrics export not yet supported")
|
||||
case "traces":
|
||||
return "traces", errors.NewInvalidInputf(errors.CodeInvalidInput, "traces export not yet supported")
|
||||
default:
|
||||
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid source: must be logs, metrics or traces")
|
||||
}
|
||||
}
|
||||
|
||||
func getExportQueryFormat(queryParams url.Values) (string, error) {
|
||||
switch queryParams.Get("format") {
|
||||
case "csv", "":
|
||||
return "csv", nil
|
||||
case "jsonl":
|
||||
return "jsonl", nil
|
||||
default:
|
||||
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid format: must be csv or jsonl")
|
||||
}
|
||||
}
|
||||
|
||||
func getExportQueryLimit(queryParams url.Values) (int, error) {
|
||||
|
||||
limitStr := queryParams.Get("limit")
|
||||
if limitStr == "" {
|
||||
return DefaultExportRowCountLimit, nil
|
||||
} else {
|
||||
limit, err := strconv.Atoi(limitStr)
|
||||
if err != nil {
|
||||
return 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid limit format: %s", err.Error())
|
||||
}
|
||||
if limit <= 0 {
|
||||
return 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "limit must be positive")
|
||||
}
|
||||
if limit > MaxExportRowCountLimit {
|
||||
return 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "limit cannot be more than %d", MaxExportRowCountLimit)
|
||||
}
|
||||
return limit, nil
|
||||
}
|
||||
}
|
||||
|
||||
func getExportQueryTimeRange(queryParams url.Values) (uint64, uint64, error) {
|
||||
|
||||
startTimeStr := queryParams.Get("start")
|
||||
endTimeStr := queryParams.Get("end")
|
||||
|
||||
if startTimeStr == "" || endTimeStr == "" {
|
||||
return 0, 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "start and end time are required")
|
||||
}
|
||||
startTime, err := strconv.ParseUint(startTimeStr, 10, 64)
|
||||
if err != nil {
|
||||
return 0, 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid start time format: %s", err.Error())
|
||||
}
|
||||
endTime, err := strconv.ParseUint(endTimeStr, 10, 64)
|
||||
if err != nil {
|
||||
return 0, 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid end time format: %s", err.Error())
|
||||
}
|
||||
return startTime, endTime, nil
|
||||
}
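The time-range parsing above is strict about both bounds. An illustrative test-style sketch, not part of this change, exercising getExportQueryTimeRange with Unix-nanosecond timestamps.

```go
package implrawdataexport

import (
	"net/url"
	"testing"

	"github.com/stretchr/testify/assert"
)

// Example only: start and end are required and must be Unix timestamps in nanoseconds.
func TestGetExportQueryTimeRangeExample(t *testing.T) {
	params := url.Values{}
	params.Set("start", "1693612800000000000")
	params.Set("end", "1693699199000000000")

	start, end, err := getExportQueryTimeRange(params)
	assert.NoError(t, err)
	assert.Equal(t, uint64(1693612800000000000), start)
	assert.Equal(t, uint64(1693699199000000000), end)

	// Missing or malformed values are rejected.
	_, _, err = getExportQueryTimeRange(url.Values{"start": {"not-a-number"}, "end": {"1"}})
	assert.Error(t, err)
}
```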
|
||||
|
||||
func constructCSVHeaderFromQueryResponse(data map[string]any) []string {
|
||||
header := make([]string, 0, len(data))
|
||||
for key := range data {
|
||||
header = append(header, key)
|
||||
}
|
||||
// This is to ensure CSV output is consistent across multiple queries
|
||||
slices.SortFunc(header, func(a, b string) int {
|
||||
ai, bi := slices.Index(priorityColumns, a), slices.Index(priorityColumns, b)
|
||||
switch {
|
||||
case ai != -1 && bi != -1:
|
||||
return ai - bi
|
||||
case ai != -1:
|
||||
return -1
|
||||
case bi != -1:
|
||||
return 1
|
||||
default:
|
||||
if a < b {
|
||||
return -1
|
||||
} else if a > b {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
})
|
||||
return header
|
||||
}
|
||||
|
||||
@@ -311,12 +427,9 @@ func constructCSVRecordFromQueryResponse(data map[string]any, headerToIndexMappi
|
||||
valueStr = v.String()
|
||||
|
||||
default:
|
||||
jsonBytes, err := json.Marshal(v)
|
||||
if err != nil {
|
||||
valueStr = fmt.Sprintf("%v", v)
|
||||
} else {
|
||||
valueStr = string(jsonBytes)
|
||||
}
|
||||
// For all other complex types (maps, structs, etc.)
|
||||
jsonBytes, _ := json.Marshal(v)
|
||||
valueStr = string(jsonBytes)
|
||||
}
|
||||
|
||||
record[index] = sanitizeForCSV(valueStr)
|
||||
@@ -325,6 +438,26 @@ func constructCSVRecordFromQueryResponse(data map[string]any, headerToIndexMappi
|
||||
return record
|
||||
}
|
||||
|
||||
// getExportQueryColumns parses the "columns" query parameters and returns a slice of TelemetryFieldKey structs.
|
||||
// Each column should be a valid telemetry field key in the format "context.field:type" or "context.field" or "field"
|
||||
func getExportQueryColumns(queryParams url.Values) []telemetrytypes.TelemetryFieldKey {
|
||||
columnParams := queryParams["columns"]
|
||||
|
||||
columns := make([]telemetrytypes.TelemetryFieldKey, 0, len(columnParams))
|
||||
|
||||
for _, columnStr := range columnParams {
|
||||
// Skip empty strings
|
||||
columnStr = strings.TrimSpace(columnStr)
|
||||
if columnStr == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
columns = append(columns, telemetrytypes.GetFieldKeyFromKeyText(columnStr))
|
||||
}
|
||||
|
||||
return columns
|
||||
}
|
||||
|
||||
func getsizeOfStringSlice(slice []string) uint64 {
|
||||
var totalBytes uint64
|
||||
for _, str := range slice {
|
||||
@@ -332,3 +465,52 @@ func getsizeOfStringSlice(slice []string) uint64 {
|
||||
}
|
||||
return totalBytes
|
||||
}
|
||||
|
||||
// getExportQueryOrderBy parses the "order_by" query parameters and returns a slice of OrderBy structs.
|
||||
// Each "order_by" parameter should be in the format "column:direction"
|
||||
// Each "column" should be a valid telemetry field key in the format "context.field:type" or "context.field" or "field"
|
||||
func getExportQueryOrderBy(queryParams url.Values) ([]qbtypes.OrderBy, error) {
|
||||
orderByParam := queryParams.Get("order_by")
|
||||
|
||||
orderByParam = strings.TrimSpace(orderByParam)
|
||||
if orderByParam == "" {
|
||||
return telemetrylogs.DefaultLogsV2SortingOrder, nil
|
||||
}
|
||||
|
||||
parts := strings.Split(orderByParam, ":")
|
||||
if len(parts) != 2 && len(parts) != 3 {
|
||||
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid order_by format: %s, should be <column>:<direction>", orderByParam)
|
||||
}
|
||||
|
||||
column := strings.Join(parts[:len(parts)-1], ":")
|
||||
direction := parts[len(parts)-1]
|
||||
|
||||
orderDirection, ok := qbtypes.OrderDirectionMap[direction]
|
||||
if !ok {
|
||||
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid order_by direction: %s, should be one of %s, %s", direction, qbtypes.OrderDirectionAsc, qbtypes.OrderDirectionDesc)
|
||||
}
|
||||
|
||||
orderByKey := telemetrytypes.GetFieldKeyFromKeyText(column)
|
||||
|
||||
orderBy := []qbtypes.OrderBy{
|
||||
{
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: orderByKey,
|
||||
},
|
||||
Direction: orderDirection,
|
||||
},
|
||||
}
|
||||
|
||||
// If we are ordering by the timestamp column, also order by the ID column
|
||||
if orderByKey.Name == telemetrylogs.LogsV2TimestampColumn {
|
||||
orderBy = append(orderBy, qbtypes.OrderBy{
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2IDColumn,
|
||||
},
|
||||
},
|
||||
Direction: orderDirection,
|
||||
})
|
||||
}
|
||||
return orderBy, nil
|
||||
}
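As the comment above notes, ordering by the timestamp column implicitly appends the id column as a tie-breaker. An illustrative test-style sketch, not part of this change.

```go
package implrawdataexport

import (
	"net/url"
	"testing"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/stretchr/testify/assert"
)

// Example only: "timestamp:asc" yields two OrderBy entries (timestamp, then id).
func TestGetExportQueryOrderByExample(t *testing.T) {
	order, err := getExportQueryOrderBy(url.Values{"order_by": {"timestamp:asc"}})
	assert.NoError(t, err)
	assert.Len(t, order, 2)
	assert.Equal(t, qbtypes.OrderDirectionAsc, order[0].Direction)
	assert.Equal(t, qbtypes.OrderDirectionAsc, order[1].Direction)

	// An unknown direction is rejected.
	_, err = getExportQueryOrderBy(url.Values{"order_by": {"timestamp:sideways"}})
	assert.Error(t, err)
}
```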
|
||||
|
||||
@@ -2,84 +2,58 @@ package implrawdataexport
|
||||
|
||||
import (
|
||||
"net/url"
|
||||
"strconv"
|
||||
"testing"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/http/binding"
|
||||
"github.com/SigNoz/signoz/pkg/types/exporttypes"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrylogs"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestExportRawDataFormatQueryParam_BindingDefaults(t *testing.T) {
|
||||
var params exporttypes.ExportRawDataFormatQueryParam
|
||||
err := binding.Query.BindQuery(url.Values{}, ¶ms)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, "csv", params.Format)
|
||||
}
|
||||
|
||||
func logQuery(limit int) qbtypes.QueryEnvelope {
|
||||
return qbtypes.QueryEnvelope{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{Limit: limit},
|
||||
}
|
||||
}
|
||||
|
||||
func traceQuery(limit int) qbtypes.QueryEnvelope {
|
||||
return qbtypes.QueryEnvelope{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{Limit: limit},
|
||||
}
|
||||
}
|
||||
|
||||
func traceOperatorQuery(limit int) qbtypes.QueryEnvelope {
|
||||
return qbtypes.QueryEnvelope{
|
||||
Type: qbtypes.QueryTypeTraceOperator,
|
||||
Spec: qbtypes.QueryBuilderTraceOperator{Limit: limit, Expression: "A"},
|
||||
}
|
||||
}
|
||||
|
||||
func makeRequest(queries ...qbtypes.QueryEnvelope) qbtypes.QueryRangeRequest {
|
||||
return qbtypes.QueryRangeRequest{
|
||||
Start: 1000000000000,
|
||||
End: 1000003600000,
|
||||
RequestType: qbtypes.RequestTypeRaw,
|
||||
CompositeQuery: qbtypes.CompositeQuery{Queries: queries},
|
||||
}
|
||||
}
|
||||
|
||||
func TestValidateSpecForExport(t *testing.T) {
|
||||
func TestGetExportQuerySource(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
req qbtypes.QueryRangeRequest
|
||||
expectedError bool
|
||||
name string
|
||||
queryParams url.Values
|
||||
expectedSource string
|
||||
expectedError bool
|
||||
}{
|
||||
{
|
||||
name: "single log query",
|
||||
req: makeRequest(logQuery(0)),
|
||||
name: "default logs source",
|
||||
queryParams: url.Values{},
|
||||
expectedSource: "logs",
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "single trace query",
|
||||
req: makeRequest(traceQuery(0)),
|
||||
name: "explicit logs source",
|
||||
queryParams: url.Values{"source": {"logs"}},
|
||||
expectedSource: "logs",
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "trace operator alone",
|
||||
req: makeRequest(traceOperatorQuery(0)),
|
||||
name: "metrics source - not supported",
|
||||
queryParams: url.Values{"source": {"metrics"}},
|
||||
expectedSource: "metrics",
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "multiple queries without trace operator",
|
||||
req: makeRequest(logQuery(0), traceQuery(0)),
|
||||
expectedError: true,
|
||||
name: "traces source - not supported",
|
||||
queryParams: url.Values{"source": {"traces"}},
|
||||
expectedSource: "traces",
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "unsupported query type",
|
||||
req: makeRequest(qbtypes.QueryEnvelope{Type: qbtypes.QueryTypeBuilder, Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{}}),
|
||||
expectedError: true,
|
||||
name: "invalid source",
|
||||
queryParams: url.Values{"source": {"invalid"}},
|
||||
expectedSource: "",
|
||||
expectedError: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
err := validateSpecForExport(&tt.req)
|
||||
source, err := getExportQuerySource(tt.queryParams)
|
||||
assert.Equal(t, tt.expectedSource, source)
|
||||
if tt.expectedError {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
@@ -89,69 +63,456 @@ func TestValidateSpecForExport(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestValidateAndApplyDefaultExportLimits(t *testing.T) {
|
||||
func TestGetExportQueryFormat(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
queries []qbtypes.QueryEnvelope
|
||||
expectedError bool
|
||||
checkQueries func(t *testing.T, queries []qbtypes.QueryEnvelope)
|
||||
name string
|
||||
queryParams url.Values
|
||||
expectedFormat string
|
||||
expectedError bool
|
||||
}{
|
||||
{
|
||||
name: "single log query, zero limit gets default",
|
||||
queries: makeRequest(logQuery(0)).CompositeQuery.Queries,
|
||||
checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
|
||||
assert.Equal(t, DefaultExportRowCountLimit, q[0].GetLimit())
|
||||
},
|
||||
name: "default csv format",
|
||||
queryParams: url.Values{},
|
||||
expectedFormat: "csv",
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "single log query, valid limit kept",
|
||||
queries: makeRequest(logQuery(1000)).CompositeQuery.Queries,
|
||||
checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
|
||||
assert.Equal(t, 1000, q[0].GetLimit())
|
||||
},
|
||||
name: "explicit csv format",
|
||||
queryParams: url.Values{"format": {"csv"}},
|
||||
expectedFormat: "csv",
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "single log query, max limit kept",
|
||||
queries: makeRequest(logQuery(MaxExportRowCountLimit)).CompositeQuery.Queries,
|
||||
checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
|
||||
assert.Equal(t, MaxExportRowCountLimit, q[0].GetLimit())
|
||||
},
|
||||
name: "jsonl format",
|
||||
queryParams: url.Values{"format": {"jsonl"}},
|
||||
expectedFormat: "jsonl",
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "single log query, limit exceeds max",
|
||||
queries: makeRequest(logQuery(MaxExportRowCountLimit + 1)).CompositeQuery.Queries,
|
||||
name: "invalid format",
|
||||
queryParams: url.Values{"format": {"xml"}},
|
||||
expectedFormat: "",
|
||||
expectedError: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
format, err := getExportQueryFormat(tt.queryParams)
|
||||
assert.Equal(t, tt.expectedFormat, format)
|
||||
if tt.expectedError {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetExportQueryLimit(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
queryParams url.Values
|
||||
expectedLimit int
|
||||
expectedError bool
|
||||
}{
|
||||
{
|
||||
name: "default limit",
|
||||
queryParams: url.Values{},
|
||||
expectedLimit: DefaultExportRowCountLimit,
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "valid limit",
|
||||
queryParams: url.Values{"limit": {"5000"}},
|
||||
expectedLimit: 5000,
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "maximum limit",
|
||||
queryParams: url.Values{"limit": {strconv.Itoa(MaxExportRowCountLimit)}},
|
||||
expectedLimit: MaxExportRowCountLimit,
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "limit exceeds maximum",
|
||||
queryParams: url.Values{"limit": {"100000"}},
|
||||
expectedLimit: 0,
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "single log query, negative limit",
|
||||
queries: makeRequest(logQuery(-1)).CompositeQuery.Queries,
|
||||
name: "invalid limit format",
|
||||
queryParams: url.Values{"limit": {"invalid"}},
|
||||
expectedLimit: 0,
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "single trace query, zero limit gets default",
|
||||
queries: makeRequest(traceQuery(0)).CompositeQuery.Queries,
|
||||
checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
|
||||
assert.Equal(t, DefaultExportRowCountLimit, q[0].GetLimit())
|
||||
name: "negative limit",
|
||||
queryParams: url.Values{"limit": {"-100"}},
|
||||
expectedLimit: 0,
|
||||
expectedError: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
limit, err := getExportQueryLimit(tt.queryParams)
|
||||
assert.Equal(t, tt.expectedLimit, limit)
|
||||
if tt.expectedError {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetExportQueryTimeRange(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
queryParams url.Values
|
||||
expectedStartTime uint64
|
||||
expectedEndTime uint64
|
||||
expectedError bool
|
||||
}{
|
||||
{
|
||||
name: "valid time range",
|
||||
queryParams: url.Values{
|
||||
"start": {"1640995200"},
|
||||
"end": {"1641081600"},
|
||||
},
|
||||
expectedStartTime: 1640995200,
|
||||
expectedEndTime: 1641081600,
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "missing start time",
|
||||
queryParams: url.Values{"end": {"1641081600"}},
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "missing end time",
|
||||
queryParams: url.Values{"start": {"1640995200"}},
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "missing both times",
|
||||
queryParams: url.Values{},
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "invalid start time format",
|
||||
queryParams: url.Values{
|
||||
"start": {"invalid"},
|
||||
"end": {"1641081600"},
|
||||
},
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "invalid end time format",
|
||||
queryParams: url.Values{
|
||||
"start": {"1640995200"},
|
||||
"end": {"invalid"},
|
||||
},
|
||||
expectedError: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
startTime, endTime, err := getExportQueryTimeRange(tt.queryParams)
|
||||
if tt.expectedError {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, tt.expectedStartTime, startTime)
|
||||
assert.Equal(t, tt.expectedEndTime, endTime)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetExportQueryColumns(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
queryParams url.Values
|
||||
expectedColumns []telemetrytypes.TelemetryFieldKey
|
||||
}{
|
||||
{
|
||||
name: "no columns specified",
|
||||
queryParams: url.Values{},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{},
|
||||
},
|
||||
{
|
||||
name: "single column",
|
||||
queryParams: url.Values{
|
||||
"columns": {"timestamp"},
|
||||
},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "timestamp"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "trace operator alone, zero limit gets default",
|
||||
queries: makeRequest(traceOperatorQuery(0)).CompositeQuery.Queries,
|
||||
checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
|
||||
assert.Equal(t, DefaultExportRowCountLimit, q[0].GetLimit())
|
||||
name: "multiple columns",
|
||||
queryParams: url.Values{
|
||||
"columns": {"timestamp", "message", "level"},
|
||||
},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "timestamp"},
|
||||
{Name: "message"},
|
||||
{Name: "level"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "empty column name (should be skipped)",
|
||||
queryParams: url.Values{
|
||||
"columns": {"timestamp", "", "level"},
|
||||
},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "timestamp"},
|
||||
{Name: "level"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "whitespace column name (should be skipped)",
|
||||
queryParams: url.Values{
|
||||
"columns": {"timestamp", " ", "level"},
|
||||
},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "timestamp"},
|
||||
{Name: "level"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "valid column name with data type",
|
||||
queryParams: url.Values{
|
||||
"columns": {"timestamp", "attribute.user:string", "level"},
|
||||
},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "timestamp"},
|
||||
{Name: "user", FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeString},
|
||||
{Name: "level"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "valid column name with dot notation",
|
||||
queryParams: url.Values{
|
||||
"columns": {"timestamp", "attribute.user.string", "level"},
|
||||
},
|
||||
expectedColumns: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "timestamp"},
|
||||
{Name: "user.string", FieldContext: telemetrytypes.FieldContextAttribute},
|
||||
{Name: "level"},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
err := validateAndApplyDefaultExportLimits(tt.queries)
|
||||
columns := getExportQueryColumns(tt.queryParams)
|
||||
assert.Equal(t, len(tt.expectedColumns), len(columns))
|
||||
for i, expectedCol := range tt.expectedColumns {
|
||||
assert.Equal(t, expectedCol, columns[i])
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetExportQueryOrderBy(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
queryParams url.Values
|
||||
expectedOrder []qbtypes.OrderBy
|
||||
expectedError bool
|
||||
}{
|
||||
{
|
||||
name: "no order specified",
|
||||
queryParams: url.Values{},
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionDesc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2TimestampColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionDesc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2IDColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "single order error, direction not specified",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"timestamp"},
|
||||
},
|
||||
expectedOrder: nil,
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "single order no error",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"timestamp:asc"},
|
||||
},
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2TimestampColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2IDColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "multiple orders",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"timestamp:asc", "body:desc", "id:asc"},
|
||||
},
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2TimestampColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2IDColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "empty order name (should be skipped)",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"timestamp:asc", "", "id:asc"},
|
||||
},
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2TimestampColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2IDColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "whitespace order name (should be skipped)",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"timestamp:asc", " ", "id:asc"},
|
||||
},
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2TimestampColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: telemetrylogs.LogsV2IDColumn,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "invalid order name (should error out)",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"attributes.user:", "id:asc"},
|
||||
},
|
||||
expectedOrder: nil,
|
||||
expectedError: true,
|
||||
},
|
||||
{
|
||||
name: "valid order name (should be included)",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"attribute.user:string:desc", "id:asc"},
|
||||
},
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionDesc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "user",
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: false,
|
||||
},
|
||||
{
|
||||
name: "valid order name (should be included)",
|
||||
queryParams: url.Values{
|
||||
"order_by": {"attribute.user.string:desc", "id:asc"},
|
||||
},
|
||||
expectedOrder: []qbtypes.OrderBy{
|
||||
{
|
||||
Direction: qbtypes.OrderDirectionDesc,
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "user.string",
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedError: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
order, err := getExportQueryOrderBy(tt.queryParams)
|
||||
if tt.expectedError {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
if tt.checkQueries != nil {
|
||||
tt.checkQueries(t, tt.queries)
|
||||
assert.Equal(t, len(tt.expectedOrder), len(order))
|
||||
for i, expectedOrd := range tt.expectedOrder {
|
||||
assert.Equal(t, expectedOrd, order[i])
|
||||
}
|
||||
}
|
||||
})
|
||||
@@ -168,8 +529,13 @@ func TestConstructCSVHeaderFromQueryResponse(t *testing.T) {
|
||||
|
||||
header := constructCSVHeaderFromQueryResponse(data)
|
||||
|
||||
// Priority columns come first in order, then the rest alphabetically.
|
||||
assert.Equal(t, []string{"timestamp", "id", "level", "message"}, header)
|
||||
// Since map iteration order is not guaranteed, check that all expected keys are present
|
||||
expectedKeys := []string{"timestamp", "message", "level", "id"}
|
||||
assert.Equal(t, len(expectedKeys), len(header))
|
||||
|
||||
for _, key := range expectedKeys {
|
||||
assert.Contains(t, header, key)
|
||||
}
|
||||
}
|
||||
|
||||
func TestConstructCSVRecordFromQueryResponse(t *testing.T) {
|
||||
|
||||
@@ -28,18 +28,8 @@ func (m *Module) ExportRawData(ctx context.Context, orgID valuer.UUID, rangeRequ
|
||||
instrumentationtypes.CodeFunctionName: "ExportRawData",
|
||||
})
|
||||
|
||||
traceOperatorQueryIndex := rangeRequest.TraceOperatorQueryIndex()
|
||||
|
||||
queries := rangeRequest.CompositeQuery.Queries
|
||||
|
||||
// If the trace operator query is present, mark the queries other than trace operator as disabled
|
||||
if traceOperatorQueryIndex > -1 {
|
||||
for idx := range len(queries) {
|
||||
if idx != traceOperatorQueryIndex {
|
||||
queries[idx].SetDisabled(true)
|
||||
}
|
||||
}
|
||||
}
|
||||
spec := rangeRequest.CompositeQuery.Queries[0].Spec.(qbtypes.QueryBuilderQuery[qbtypes.LogAggregation])
|
||||
rowCountLimit := spec.Limit
|
||||
|
||||
rowChan := make(chan *qbtypes.RawRow, 1)
|
||||
errChan := make(chan error, 1)
|
||||
@@ -53,62 +43,52 @@ func (m *Module) ExportRawData(ctx context.Context, orgID valuer.UUID, rangeRequ
|
||||
defer close(errChan)
|
||||
defer close(rowChan)
|
||||
|
||||
if traceOperatorQueryIndex > -1 {
|
||||
// If the trace operator query is present, we need to export the data for the trace operator query only
|
||||
exportRawDataForSingleQuery(m.querier, contextWithTimeout, orgID, rangeRequest, rowChan, errChan, doneChan, traceOperatorQueryIndex)
|
||||
} else {
|
||||
// If the trace operator query is not present, we need to export the data for the first query only
|
||||
exportRawDataForSingleQuery(m.querier, contextWithTimeout, orgID, rangeRequest, rowChan, errChan, doneChan, 0)
|
||||
rowCount := 0
|
||||
|
||||
for rowCount < rowCountLimit {
|
||||
spec.Limit = min(ChunkSize, rowCountLimit-rowCount)
|
||||
spec.Offset = rowCount
|
||||
|
||||
rangeRequest.CompositeQuery.Queries[0].Spec = spec
|
||||
|
||||
response, err := m.querier.QueryRange(contextWithTimeout, orgID, rangeRequest)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
return
|
||||
}
|
||||
|
||||
newRowsCount := 0
|
||||
for _, result := range response.Data.Results {
|
||||
resultData, ok := result.(*qbtypes.RawData)
|
||||
if !ok {
|
||||
errChan <- errors.NewInternalf(errors.CodeInternal, "expected RawData, got %T", result)
|
||||
return
|
||||
}
|
||||
|
||||
newRowsCount += len(resultData.Rows)
|
||||
for _, row := range resultData.Rows {
|
||||
select {
|
||||
case rowChan <- row:
|
||||
case <-doneChan:
|
||||
return
|
||||
case <-ctx.Done():
|
||||
errChan <- ctx.Err()
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// Break if we did not receive any new rows
|
||||
if newRowsCount == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
rowCount += newRowsCount
|
||||
|
||||
}
|
||||
}()
|
||||
|
||||
return rowChan, errChan
|
||||
|
||||
}
|
||||
|
||||
func exportRawDataForSingleQuery(querier querier.Querier, ctx context.Context, orgID valuer.UUID, rangeRequest *qbtypes.QueryRangeRequest, rowChan chan *qbtypes.RawRow, errChan chan error, doneChan chan any, queryIndex int) {
|
||||
|
||||
queries := rangeRequest.CompositeQuery.Queries
|
||||
rowCountLimit := queries[queryIndex].GetLimit()
|
||||
rowCount := 0
|
||||
|
||||
for rowCount < rowCountLimit {
|
||||
chunkSize := min(ChunkSize, rowCountLimit-rowCount)
|
||||
queries[queryIndex].SetLimit(chunkSize)
|
||||
queries[queryIndex].SetOffset(rowCount)
|
||||
|
||||
response, err := querier.QueryRange(ctx, orgID, rangeRequest)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
return
|
||||
}
|
||||
|
||||
newRowsCount := 0
|
||||
for _, result := range response.Data.Results {
|
||||
resultData, ok := result.(*qbtypes.RawData)
|
||||
if !ok {
|
||||
errChan <- errors.NewInternalf(errors.CodeInternal, "expected RawData, got %T", result)
|
||||
return
|
||||
}
|
||||
|
||||
newRowsCount += len(resultData.Rows)
|
||||
for _, row := range resultData.Rows {
|
||||
select {
|
||||
case rowChan <- row:
|
||||
case <-doneChan:
|
||||
return
|
||||
case <-ctx.Done():
|
||||
errChan <- ctx.Err()
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
rowCount += newRowsCount
|
||||
|
||||
// Stop if we received fewer rows than requested — no more data available
|
||||
if newRowsCount < chunkSize {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
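For reference, a sketch, not part of this change, of how a caller can drain the channel pair returned by ExportRawData. It assumes the Module type above lives in this package and mirrors the close-doneChan / read-until-closed pattern used by the HTTP handler.

```go
package implrawdataexport

import (
	"context"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/valuer"
)

// drainExport is an illustrative consumer of the channel pair returned by
// ExportRawData: close doneChan to tell the producer to stop, read rows until
// rowChan closes, then report any error the producer left behind.
func drainExport(ctx context.Context, m *Module, orgID valuer.UUID, req *qbtypes.QueryRangeRequest, process func(*qbtypes.RawRow) error) error {
	doneChan := make(chan any)
	defer close(doneChan)

	rowChan, errChan := m.ExportRawData(ctx, orgID, req, doneChan)

	for {
		select {
		case row, ok := <-rowChan:
			if !ok {
				// Rows are done; surface a pending error, if any.
				select {
				case err := <-errChan:
					return err
				default:
					return nil
				}
			}
			if err := process(row); err != nil {
				return err
			}
		case <-ctx.Done():
			return ctx.Err()
		}
	}
}
```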
|
||||
|
||||
@@ -23,7 +23,6 @@ type service struct {
|
||||
authz authz.AuthZ
|
||||
config user.RootConfig
|
||||
stopC chan struct{}
|
||||
healthyC chan struct{}
|
||||
}
|
||||
|
||||
func NewService(
|
||||
@@ -43,14 +42,12 @@ func NewService(
|
||||
orgGetter: orgGetter,
|
||||
authz: authz,
|
||||
config: config,
|
||||
stopC: make(chan struct{}),
|
||||
healthyC: make(chan struct{}),
|
||||
stopC: make(chan struct{}),
|
||||
}
|
||||
}
|
||||
|
||||
func (s *service) Start(ctx context.Context) error {
|
||||
if !s.config.Enabled {
|
||||
close(s.healthyC)
|
||||
<-s.stopC
|
||||
return nil
|
||||
}
|
||||
@@ -62,7 +59,6 @@ func (s *service) Start(ctx context.Context) error {
|
||||
err := s.reconcile(ctx)
|
||||
if err == nil {
|
||||
s.settings.Logger().InfoContext(ctx, "root user reconciliation completed successfully")
|
||||
close(s.healthyC)
|
||||
<-s.stopC
|
||||
return nil
|
||||
}
|
||||
@@ -78,10 +74,6 @@ func (s *service) Start(ctx context.Context) error {
|
||||
}
|
||||
}
|
||||
|
||||
func (s *service) Healthy() <-chan struct{} {
|
||||
return s.healthyC
|
||||
}
|
||||
|
||||
func (s *service) Stop(ctx context.Context) error {
|
||||
close(s.stopC)
|
||||
return nil
|
||||
|
||||
@@ -3,5 +3,5 @@ package user
|
||||
import "github.com/SigNoz/signoz/pkg/factory"
|
||||
|
||||
type Service interface {
|
||||
factory.ServiceWithHealthy
|
||||
factory.Service
|
||||
}
|
||||
|
||||
@@ -68,7 +68,6 @@ func readAsTimeSeries(rows driver.Rows, queryWindow *qbtypes.TimeRange, step qbt
		key string // deterministic join of label values
	}
	seriesMap := map[sKey]*qbtypes.TimeSeries{}
	var keyOrder []sKey // preserves ClickHouse row-arrival order

	stepMs := uint64(step.Duration.Milliseconds())

@@ -219,7 +218,6 @@ func readAsTimeSeries(rows driver.Rows, queryWindow *qbtypes.TimeRange, step qbt
		if !ok {
			series = &qbtypes.TimeSeries{Labels: lblObjs}
			seriesMap[key] = series
			keyOrder = append(keyOrder, key)
		}
		series.Values = append(series.Values, &qbtypes.TimeSeriesValue{
			Timestamp: ts,

@@ -251,8 +249,8 @@ func readAsTimeSeries(rows driver.Rows, queryWindow *qbtypes.TimeRange, step qbt
			Alias: "__result_" + strconv.Itoa(i),
		}
	}
	for _, k := range keyOrder {
		buckets[k.agg].Series = append(buckets[k.agg].Series, seriesMap[k])
	for k, s := range seriesMap {
		buckets[k.agg].Series = append(buckets[k.agg].Series, s)
	}

	var nonEmpty []*qbtypes.AggregationBucket
@@ -188,6 +188,22 @@ func postProcessMetricQuery(
	query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
	req *qbtypes.QueryRangeRequest,
) *qbtypes.Result {

	config := query.Aggregations[0]
	spaceAggOrderBy := fmt.Sprintf("%s(%s)", config.SpaceAggregation.StringValue(), config.MetricName)
	timeAggOrderBy := fmt.Sprintf("%s(%s)", config.TimeAggregation.StringValue(), config.MetricName)
	timeSpaceAggOrderBy := fmt.Sprintf("%s(%s(%s))", config.SpaceAggregation.StringValue(), config.TimeAggregation.StringValue(), config.MetricName)

	for idx := range query.Order {
		if query.Order[idx].Key.Name == spaceAggOrderBy ||
			query.Order[idx].Key.Name == timeAggOrderBy ||
			query.Order[idx].Key.Name == timeSpaceAggOrderBy {
			query.Order[idx].Key.Name = qbtypes.DefaultOrderByKey
		}
	}

	result = q.applySeriesLimit(result, query.Limit, query.Order)

	if len(query.Functions) > 0 {
		step := query.StepInterval.Duration.Milliseconds()
		functions := q.prepareFillZeroArgsWithStep(query.Functions, req, step)
@@ -576,6 +576,9 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
		aH.LicensingAPI.Activate(rw, req)
	})).Methods(http.MethodGet)

	// Export
	router.HandleFunc("/api/v1/export_raw_data", am.ViewAccess(aH.Signoz.Handlers.RawDataExport.ExportRawData)).Methods(http.MethodGet)

	router.HandleFunc("/api/v1/span_percentile", am.ViewAccess(aH.Signoz.Handlers.SpanPercentile.GetSpanPercentileDetails)).Methods(http.MethodPost)

	// Query Filter Analyzer api used to extract metric names and grouping columns from a query
@@ -12,6 +12,8 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/modules/apdex"
|
||||
"github.com/SigNoz/signoz/pkg/modules/apdex/implapdex"
|
||||
"github.com/SigNoz/signoz/pkg/modules/cloudintegration"
|
||||
"github.com/SigNoz/signoz/pkg/modules/cloudintegration/implcloudintegration"
|
||||
"github.com/SigNoz/signoz/pkg/modules/dashboard"
|
||||
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
|
||||
"github.com/SigNoz/signoz/pkg/modules/fields"
|
||||
@@ -38,24 +40,24 @@ import (
|
||||
)
|
||||
|
||||
type Handlers struct {
SavedView savedview.Handler
Apdex apdex.Handler
Dashboard dashboard.Handler
QuickFilter quickfilter.Handler
TraceFunnel tracefunnel.Handler
RawDataExport rawdataexport.Handler
SpanPercentile spanpercentile.Handler
Services services.Handler
MetricsExplorer metricsexplorer.Handler
Global global.Handler
FlaggerHandler flagger.Handler
GatewayHandler gateway.Handler
Fields fields.Handler
AuthzHandler authz.Handler
ZeusHandler zeus.Handler
QuerierHandler querier.Handler
ServiceAccountHandler serviceaccount.Handler
RegistryHandler factory.Handler
SavedView savedview.Handler
Apdex apdex.Handler
Dashboard dashboard.Handler
QuickFilter quickfilter.Handler
TraceFunnel tracefunnel.Handler
RawDataExport rawdataexport.Handler
SpanPercentile spanpercentile.Handler
Services services.Handler
MetricsExplorer metricsexplorer.Handler
Global global.Handler
FlaggerHandler flagger.Handler
GatewayHandler gateway.Handler
Fields fields.Handler
AuthzHandler authz.Handler
ZeusHandler zeus.Handler
QuerierHandler querier.Handler
ServiceAccountHandler serviceaccount.Handler
CloudIntegrationHandler cloudintegration.Handler
}

func NewHandlers(
@@ -70,26 +72,25 @@ func NewHandlers(
telemetryMetadataStore telemetrytypes.MetadataStore,
authz authz.AuthZ,
zeusService zeus.Zeus,
registryHandler factory.Handler,
) Handlers {
return Handlers{
SavedView: implsavedview.NewHandler(modules.SavedView),
Apdex: implapdex.NewHandler(modules.Apdex),
Dashboard: impldashboard.NewHandler(modules.Dashboard, providerSettings),
QuickFilter: implquickfilter.NewHandler(modules.QuickFilter),
TraceFunnel: impltracefunnel.NewHandler(modules.TraceFunnel),
RawDataExport: implrawdataexport.NewHandler(modules.RawDataExport),
Services: implservices.NewHandler(modules.Services),
MetricsExplorer: implmetricsexplorer.NewHandler(modules.MetricsExplorer),
SpanPercentile: implspanpercentile.NewHandler(modules.SpanPercentile),
Global: signozglobal.NewHandler(global),
FlaggerHandler: flagger.NewHandler(flaggerService),
GatewayHandler: gateway.NewHandler(gatewayService),
Fields: implfields.NewHandler(providerSettings, telemetryMetadataStore),
AuthzHandler: signozauthzapi.NewHandler(authz),
ZeusHandler: zeus.NewHandler(zeusService, licensing),
QuerierHandler: querierHandler,
ServiceAccountHandler: implserviceaccount.NewHandler(modules.ServiceAccount),
RegistryHandler: registryHandler,
SavedView: implsavedview.NewHandler(modules.SavedView),
Apdex: implapdex.NewHandler(modules.Apdex),
Dashboard: impldashboard.NewHandler(modules.Dashboard, providerSettings),
QuickFilter: implquickfilter.NewHandler(modules.QuickFilter),
TraceFunnel: impltracefunnel.NewHandler(modules.TraceFunnel),
RawDataExport: implrawdataexport.NewHandler(modules.RawDataExport),
Services: implservices.NewHandler(modules.Services),
MetricsExplorer: implmetricsexplorer.NewHandler(modules.MetricsExplorer),
SpanPercentile: implspanpercentile.NewHandler(modules.SpanPercentile),
Global: signozglobal.NewHandler(global),
FlaggerHandler: flagger.NewHandler(flaggerService),
GatewayHandler: gateway.NewHandler(gatewayService),
Fields: implfields.NewHandler(providerSettings, telemetryMetadataStore),
AuthzHandler: signozauthzapi.NewHandler(authz),
ZeusHandler: zeus.NewHandler(zeusService, licensing),
QuerierHandler: querierHandler,
ServiceAccountHandler: implserviceaccount.NewHandler(modules.ServiceAccount),
CloudIntegrationHandler: implcloudintegration.NewHandler(),
}
}

@@ -10,7 +10,6 @@ import (
"github.com/SigNoz/signoz/pkg/alertmanager/nfmanager/nfmanagertest"
"github.com/SigNoz/signoz/pkg/alertmanager/signozalertmanager"
"github.com/SigNoz/signoz/pkg/emailing/emailingtest"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/factory/factorytest"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
@@ -56,8 +55,7 @@ func TestNewHandlers(t *testing.T) {
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, userGetter, userRoleStore)

querierHandler := querier.NewHandler(providerSettings, nil, nil)
registryHandler := factory.NewHandler(nil)
handlers := NewHandlers(modules, providerSettings, nil, querierHandler, nil, nil, nil, nil, nil, nil, nil, registryHandler)
handlers := NewHandlers(modules, providerSettings, nil, querierHandler, nil, nil, nil, nil, nil, nil, nil)
reflectVal := reflect.ValueOf(handlers)
for i := 0; i < reflectVal.NumField(); i++ {
f := reflectVal.Field(i)

@@ -10,20 +10,19 @@ import (
"github.com/SigNoz/signoz/pkg/apiserver"
"github.com/SigNoz/signoz/pkg/apiserver/signozapiserver"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/gateway"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/instrumentation"
"github.com/SigNoz/signoz/pkg/modules/authdomain"
"github.com/SigNoz/signoz/pkg/modules/cloudintegration"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
"github.com/SigNoz/signoz/pkg/modules/serviceaccount"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
@@ -60,11 +59,10 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
struct{ gateway.Handler }{},
struct{ fields.Handler }{},
struct{ authz.Handler }{},
struct{ rawdataexport.Handler }{},
struct{ zeus.Handler }{},
struct{ querier.Handler }{},
struct{ serviceaccount.Handler }{},
struct{ factory.Handler }{},
struct{ cloudintegration.Handler }{},
).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
if err != nil {
return nil, err

@@ -274,11 +274,10 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
handlers.GatewayHandler,
handlers.Fields,
handlers.AuthzHandler,
handlers.RawDataExport,
handlers.ZeusHandler,
handlers.QuerierHandler,
handlers.ServiceAccountHandler,
handlers.RegistryHandler,
handlers.CloudIntegrationHandler,
),
)
}

@@ -422,6 +422,21 @@ func New(
// Initialize the querier handler via callback (allows EE to decorate with anomaly detection)
querierHandler := querierHandlerCallback(providerSettings, querier, analytics)

// Initialize all handlers for the modules
handlers := NewHandlers(modules, providerSettings, analytics, querierHandler, licensing, global, flagger, gateway, telemetryMetadataStore, authz, zeus)

// Initialize the API server
apiserver, err := factory.NewProviderFromNamedMap(
ctx,
providerSettings,
config.APIServer,
NewAPIServerProviderFactories(orgGetter, authz, modules, handlers),
"signoz",
)
if err != nil {
return nil, err
}

// Create a list of all stats collectors
statsCollectors := []statsreporter.StatsCollector{
alertmanager,
@@ -448,7 +463,6 @@ func New(
}

registry, err := factory.NewRegistry(
ctx,
instrumentation.Logger(),
factory.NewNamedService(factory.MustNewName("instrumentation"), instrumentation),
factory.NewNamedService(factory.MustNewName("pprof"), pprofService),
@@ -458,23 +472,7 @@ func New(
factory.NewNamedService(factory.MustNewName("statsreporter"), statsReporter),
factory.NewNamedService(factory.MustNewName("tokenizer"), tokenizer),
factory.NewNamedService(factory.MustNewName("authz"), authz),
factory.NewNamedService(factory.MustNewName("user"), userService, factory.MustNewName("authz")),
)
if err != nil {
return nil, err
}

// Initialize all handlers for the modules
registryHandler := factory.NewHandler(registry)
handlers := NewHandlers(modules, providerSettings, analytics, querierHandler, licensing, global, flagger, gateway, telemetryMetadataStore, authz, zeus, registryHandler)

// Initialize the API server (after registry so it can access service health)
apiserverInstance, err := factory.NewProviderFromNamedMap(
ctx,
providerSettings,
config.APIServer,
NewAPIServerProviderFactories(orgGetter, authz, modules, handlers),
"signoz",
factory.NewNamedService(factory.MustNewName("user"), userService),
)
if err != nil {
return nil, err
@@ -492,7 +490,7 @@ func New(
Prometheus: prometheus,
Alertmanager: alertmanager,
Querier: querier,
APIServer: apiserverInstance,
APIServer: apiserver,
Zeus: zeus,
Licensing: licensing,
Emailing: emailing,

@@ -51,7 +51,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, max(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte WHERE (`service.name`) IN (SELECT `service.name` FROM __spatial_aggregation_cte GROUP BY `service.name` ORDER BY avg(value) DESC LIMIT 10) ORDER BY avg(value) OVER (PARTITION BY `service.name`) DESC, `service.name`, ts ASC",
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, max(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Args: []any{"signoz_calls_total", uint64(1747785600000), uint64(1747983420000), "cartservice", "cumulative", 0},
},
expectedErr: nil,
@@ -84,7 +84,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte WHERE (`service.name`) IN (SELECT `service.name` FROM __spatial_aggregation_cte GROUP BY `service.name` ORDER BY avg(value) DESC LIMIT 10) ORDER BY avg(value) OVER (PARTITION BY `service.name`) DESC, `service.name`, ts ASC",
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Args: []any{"signoz_calls_total", uint64(1747872000000), uint64(1747983420000), "cartservice", "delta"},
},
expectedErr: nil,
@@ -117,7 +117,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `service.name`, avg(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte WHERE (`service.name`) IN (SELECT `service.name` FROM __spatial_aggregation_cte GROUP BY `service.name` ORDER BY avg(value) DESC LIMIT 10) ORDER BY avg(value) OVER (PARTITION BY `service.name`) DESC, `service.name`, ts ASC",
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `service.name`, avg(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Args: []any{"signoz_calls_total", uint64(1747872000000), uint64(1747983420000), "cartservice", "delta", 0},
},
expectedErr: nil,
@@ -150,7 +150,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'host.name') AS `host.name`, avg(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'host.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte WHERE (`host.name`) IN (SELECT `host.name` FROM __spatial_aggregation_cte GROUP BY `host.name` ORDER BY avg(value) DESC LIMIT 10) ORDER BY avg(value) OVER (PARTITION BY `host.name`) DESC, `host.name`, ts ASC",
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'host.name') AS `host.name`, avg(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'host.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `host.name`, ts",
Args: []any{"system.memory.usage", uint64(1747872000000), uint64(1747983420000), "big-data-node-1", "unspecified", 0},
},
expectedErr: nil,

@@ -4,7 +4,6 @@ import (
"context"
"fmt"
"log/slog"
"strings"

"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/flagger"
@@ -540,13 +539,6 @@ func (b *MetricQueryStatementBuilder) buildSpatialAggregationCTE(
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
}

func isMetricAggOrderByKey(key string, config qbtypes.MetricAggregation) bool {
spaceAggOrderBy := fmt.Sprintf("%s(%s)", config.SpaceAggregation.StringValue(), config.MetricName)
timeAggOrderBy := fmt.Sprintf("%s(%s)", config.TimeAggregation.StringValue(), config.MetricName)
timeSpaceAggOrderBy := fmt.Sprintf("%s(%s(%s))", config.SpaceAggregation.StringValue(), config.TimeAggregation.StringValue(), config.MetricName)
return key == spaceAggOrderBy || key == timeAggOrderBy || key == timeSpaceAggOrderBy
}
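// Illustrative only (assumption, not part of the diff): with the same example
// aggregation (sum over rate of "signoz_calls_total"), isMetricAggOrderByKey
// would return true for "sum(signoz_calls_total)", "rate(signoz_calls_total)" and
// "sum(rate(signoz_calls_total))", and false for plain group-by keys such as "service.name".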

func (b *MetricQueryStatementBuilder) BuildFinalSelect(
cteFragments []string,
cteArgs [][]any,
@@ -554,159 +546,73 @@ func (b *MetricQueryStatementBuilder) BuildFinalSelect(
) (*qbtypes.Statement, error) {
metricType := query.Aggregations[0].Type
spaceAgg := query.Aggregations[0].SpaceAggregation
finalCTE := "__spatial_aggregation_cte"
if metricType == metrictypes.HistogramType {
histogramCTE, histogramCTEArgs, err := b.buildHistogramCTE(query)
if err != nil {
return nil, err
}
cteFragments = append(cteFragments, histogramCTE)
cteArgs = append(cteArgs, histogramCTEArgs)
finalCTE = "__histogram_cte"
}

groupByKeys := querybuilder.GroupByKeys(query.GroupBy)
hasGroupBy := len(groupByKeys) > 0

if hasGroupBy {
cteWithAvgColumn := b.buildCTEWithAvgColumn(query, finalCTE)
cteFragments = append(cteFragments, cteWithAvgColumn)

cteWithGroupRankColumn := b.buildCTEWithGroupRank(query)
cteFragments = append(cteFragments, cteWithGroupRankColumn)
finalCTE = "__with_group_rank_cte"
}

combined := querybuilder.CombineCTEs(cteFragments)

var args []any
for _, a := range cteArgs {
args = append(args, a...)
}

sb := sqlbuilder.NewSelectBuilder()
sb.Select("ts")
if metricType == metrictypes.HistogramType && spaceAgg == metrictypes.SpaceAggregationCount && query.Aggregations[0].ComparisonSpaceAggregationParam == nil {
sb.SelectMore("le")
}
sb.SelectMore(groupByKeys...)
sb.SelectMore("value")
sb.From(finalCTE)
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)
sb.Where(rewrittenExpr)
}

if hasGroupBy {
sb.OrderBy("group_rank")
if query.Limit > 0 {
sb.Where(fmt.Sprintf("group_rank <= %d", query.Limit))
if metricType == metrictypes.HistogramType && spaceAgg.IsPercentile() {
quantile := query.Aggregations[0].SpaceAggregation.Percentile()
sb.Select("ts")
for _, g := range query.GroupBy {
sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
sb.SelectMore(fmt.Sprintf(
"histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), %.3f) AS value",
quantile,
))
sb.From("__spatial_aggregation_cte")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
sb.GroupBy("ts")
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)
sb.Having(rewrittenExpr)
}
} else if metricType == metrictypes.HistogramType && spaceAgg == metrictypes.SpaceAggregationCount && query.Aggregations[0].ComparisonSpaceAggregationParam != nil {
sb.Select("ts")

for _, g := range query.GroupBy {
sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}

aggQuery, err := AggregationQueryForHistogramCountWithParams(query.Aggregations[0].ComparisonSpaceAggregationParam)
if err != nil {
return nil, err
}
sb.SelectMore(aggQuery)

sb.From("__spatial_aggregation_cte")

sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
sb.GroupBy("ts")

if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)
sb.Having(rewrittenExpr)
}
} else {
// for count aggregation on histograms with no params, the exact result of spatial aggregation can be sent forward
sb.Select("*")
sb.From("__spatial_aggregation_cte")
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)
sb.Where(rewrittenExpr)
}
}
sb.OrderBy(querybuilder.GroupByKeys(query.GroupBy)...)
sb.OrderBy("ts")
if metricType == metrictypes.HistogramType && spaceAgg == metrictypes.SpaceAggregationCount && query.Aggregations[0].ComparisonSpaceAggregationParam == nil {
sb.OrderBy("toFloat64(le)")
}
sb.OrderBy("ts ASC")

q, a := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
return &qbtypes.Statement{Query: combined + q, Args: append(args, a...)}, nil
}

func (b *MetricQueryStatementBuilder) buildHistogramCTE(
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
) (string, []any, error) {
spaceAgg := query.Aggregations[0].SpaceAggregation
histogramCTEQueryBuilder := sqlbuilder.NewSelectBuilder()
if spaceAgg.IsPercentile() {
histogramCTEQueryBuilder.Select("ts")
for _, g := range query.GroupBy {
histogramCTEQueryBuilder.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
quantile := spaceAgg.Percentile()
histogramCTEQueryBuilder.SelectMore(fmt.Sprintf(
"histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), %.3f) AS value",
quantile,
))
histogramCTEQueryBuilder.From("__spatial_aggregation_cte")
histogramCTEQueryBuilder.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
histogramCTEQueryBuilder.GroupBy("ts")
} else if spaceAgg == metrictypes.SpaceAggregationCount && query.Aggregations[0].ComparisonSpaceAggregationParam != nil {
histogramCTEQueryBuilder.Select("ts")
for _, g := range query.GroupBy {
histogramCTEQueryBuilder.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
aggQuery, err := AggregationQueryForHistogramCountWithParams(query.Aggregations[0].ComparisonSpaceAggregationParam)
if err != nil {
return "", nil, err
}
histogramCTEQueryBuilder.SelectMore(aggQuery)
histogramCTEQueryBuilder.From("__spatial_aggregation_cte")
histogramCTEQueryBuilder.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
histogramCTEQueryBuilder.GroupBy("ts")
} else {
// for count aggregation on histograms with no params, the exact result of spatial aggregation can be sent forward
histogramCTEQueryBuilder.Select("*")
histogramCTEQueryBuilder.From("__spatial_aggregation_cte")
}
histogramQueryCTE, histogramQueryCTEArgs := histogramCTEQueryBuilder.BuildWithFlavor(sqlbuilder.ClickHouse)
histogramCTE := fmt.Sprintf("__histogram_cte AS (%s)", histogramQueryCTE)

return histogramCTE, histogramQueryCTEArgs, nil
}

/*
this receives a CTE (__spatial_aggregation_cte or __histogram_cte) that has columns ts, value, and a column each for all the group by keys
it creates a CTE (__with_avg_cte) that adds a column avg_val which has the avg value for the group the row belongs in
*/
func (b *MetricQueryStatementBuilder) buildCTEWithAvgColumn(
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
latestCTE string,
) string {
withAvgCTEBuilder := sqlbuilder.NewSelectBuilder()
withAvgCTEBuilder.Select("*")
groupByKeys := querybuilder.GroupByKeys(query.GroupBy)
withAvgCTEBuilder.SelectMore(fmt.Sprintf("avgIf(value, isNaN(value) = 0) OVER (PARTITION BY %s) AS avg_val", strings.Join(groupByKeys, ",")))
withAvgCTEBuilder.From(latestCTE)
withAvgCTEQuery, _ := withAvgCTEBuilder.BuildWithFlavor(sqlbuilder.ClickHouse) // no args so second return param is ignored
withAvgCTE := fmt.Sprintf("__with_avg_cte AS (%s)", withAvgCTEQuery)
return withAvgCTE
}
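// Illustrative only (assumption, not part of the diff): for GroupBy = [`service.name`]
// the fragment built above would look roughly like
//   __with_avg_cte AS (SELECT *, avgIf(value, isNaN(value) = 0)
//     OVER (PARTITION BY `service.name`) AS avg_val FROM __spatial_aggregation_cte)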

/*
this receives the __with_avg_cte that has columns ts, value, a column each for all the group by keys, and avg_val which has the avg value for the group the row belongs in
it creates a CTE (__with_group_rank_cte) that adds a column group_rank that ranks each group based on the order by keys (or by avg val if there are none)
*/
func (b *MetricQueryStatementBuilder) buildCTEWithGroupRank(
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
) string {
withGroupByCTEBuilder := sqlbuilder.NewSelectBuilder()
withGroupByCTEBuilder.Select("*")

windowOrder := []string{}
orderedKeys := map[string]struct{}{} // this will be used to add the remaining keys as tie breakers in the end
if len(query.Order) > 0 {
for _, o := range query.Order {
key := o.Key.Name
if isMetricAggOrderByKey(key, query.Aggregations[0]) {
windowOrder = append(windowOrder, fmt.Sprintf("avg_val %s", o.Direction.StringValue()))
} else {
windowOrder = append(windowOrder, fmt.Sprintf("`%s` %s", key, o.Direction.StringValue()))
orderedKeys[fmt.Sprintf("`%s`", key)] = struct{}{}
}
}
} else {
windowOrder = append(windowOrder, "avg_val DESC")
}
groupByKeys := querybuilder.GroupByKeys(query.GroupBy)
for _, gk := range groupByKeys { // keys that haven't been added via order by keys will be added at the end as tie breakers
if _, ok := orderedKeys[gk]; !ok {
windowOrder = append(windowOrder, fmt.Sprintf("%s ASC", gk))
}
}
withGroupByCTEBuilder.SelectMore(fmt.Sprintf("dense_rank() OVER (ORDER BY %s) AS group_rank", strings.Join(windowOrder, ",")))

withGroupByCTEBuilder.From("__with_avg_cte")
withGroupRankCTEQuery, _ := withGroupByCTEBuilder.BuildWithFlavor(sqlbuilder.ClickHouse) // no args so second return param is ignored
withGroupRankCTE := fmt.Sprintf("__with_group_rank_cte AS (%s)", withGroupRankCTEQuery)
return withGroupRankCTE
}
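// Illustrative only (assumption, not part of the diff): with no explicit order by and
// GroupBy = [`service.name`], the fragment built above would look roughly like
//   __with_group_rank_cte AS (SELECT *, dense_rank()
//     OVER (ORDER BY avg_val DESC, `service.name` ASC) AS group_rank FROM __with_avg_cte)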
@@ -15,17 +15,16 @@ import (
)

func TestStatementBuilder(t *testing.T) {
type baseQuery struct {
name string
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]
orderKey string
args []any
cte string
}

bases := []baseQuery{
cases := []struct {
name string
requestType qbtypes.RequestType
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]
expected qbtypes.Statement
expectedErr error
}{
{
name: "cumulative_rate_sum",
name: "test_cumulative_rate_sum",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Signal: telemetrytypes.SignalMetrics,
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
@@ -41,16 +40,24 @@ func TestStatementBuilder(t *testing.T) {
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "service.name = 'cartservice'",
|
||||
},
|
||||
Limit: 10,
|
||||
GroupBy: []qbtypes.GroupByKey{
|
||||
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service.name"}},
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "service.name",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
orderKey: "service.name",
|
||||
args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "cartservice", "signoz_calls_total", uint64(1747947360000), uint64(1747983420000), 0},
|
||||
cte: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`)",
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
|
||||
Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "cartservice", "signoz_calls_total", uint64(1747947360000), uint64(1747983420000), 0},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "cumulative_rate_sum_with_mat_column",
|
||||
name: "test_cumulative_rate_sum_with_mat_column",
|
||||
requestType: qbtypes.RequestTypeTimeSeries,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
|
||||
Signal: telemetrytypes.SignalMetrics,
|
||||
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
|
||||
@@ -66,16 +73,24 @@ func TestStatementBuilder(t *testing.T) {
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "materialized.key.name REGEXP 'cartservice' OR service.name = 'cartservice'",
|
||||
},
|
||||
Limit: 10,
|
||||
GroupBy: []qbtypes.GroupByKey{
|
||||
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service.name"}},
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "service.name",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
orderKey: "service.name",
|
||||
args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "cartservice", "cartservice", "signoz_calls_total", uint64(1747947360000), uint64(1747983420000), 0},
|
||||
cte: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND (match(JSONExtractString(labels, 'materialized.key.name'), ?) OR JSONExtractString(labels, 'service.name') = ?) GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`)",
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND (match(JSONExtractString(labels, 'materialized.key.name'), ?) OR JSONExtractString(labels, 'service.name') = ?) GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
|
||||
Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "cartservice", "cartservice", "signoz_calls_total", uint64(1747947360000), uint64(1747983420000), 0},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "delta_rate_sum",
|
||||
name: "test_delta_rate_sum",
|
||||
requestType: qbtypes.RequestTypeTimeSeries,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
|
||||
Signal: telemetrytypes.SignalMetrics,
|
||||
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
|
||||
@@ -91,16 +106,24 @@ func TestStatementBuilder(t *testing.T) {
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "service.name = 'cartservice'",
|
||||
},
|
||||
Limit: 10,
|
||||
GroupBy: []qbtypes.GroupByKey{
|
||||
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service.name"}},
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "service.name",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
orderKey: "service.name",
|
||||
args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "delta", false, "cartservice", "signoz_calls_total", uint64(1747947390000), uint64(1747983420000)},
|
||||
cte: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`)",
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
|
||||
Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "delta", false, "cartservice", "signoz_calls_total", uint64(1747947390000), uint64(1747983420000)},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "histogram_percentile1",
|
||||
name: "test_histogram_percentile1",
|
||||
requestType: qbtypes.RequestTypeTimeSeries,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
|
||||
Signal: telemetrytypes.SignalMetrics,
|
||||
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
|
||||
@@ -116,38 +139,24 @@ func TestStatementBuilder(t *testing.T) {
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "service.name = 'cartservice'",
|
||||
},
|
||||
Limit: 10,
|
||||
GroupBy: []qbtypes.GroupByKey{
|
||||
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service.name"}},
|
||||
},
|
||||
},
|
||||
orderKey: "service.name",
|
||||
args: []any{"signoz_latency", uint64(1747936800000), uint64(1747983420000), "delta", false, "cartservice", "signoz_latency", uint64(1747947390000), uint64(1747983420000)},
|
||||
cte: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`, `le`), __histogram_cte AS (SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts)",
|
||||
},
|
||||
{
|
||||
name: "histogram_percentile2",
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
|
||||
Signal: telemetrytypes.SignalMetrics,
|
||||
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
|
||||
Aggregations: []qbtypes.MetricAggregation{
|
||||
{
|
||||
MetricName: "http_server_duration_bucket",
|
||||
Type: metrictypes.HistogramType,
|
||||
Temporality: metrictypes.Cumulative,
|
||||
TimeAggregation: metrictypes.TimeAggregationRate,
|
||||
SpaceAggregation: metrictypes.SpaceAggregationPercentile95,
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "service.name",
|
||||
},
|
||||
},
|
||||
},
|
||||
GroupBy: []qbtypes.GroupByKey{
|
||||
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service.name"}},
|
||||
},
|
||||
},
|
||||
orderKey: "service.name",
|
||||
args: []any{"http_server_duration_bucket", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "http_server_duration_bucket", uint64(1747947360000), uint64(1747983420000), 0},
|
||||
cte: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name`, `le` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`, `le`), __histogram_cte AS (SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts)",
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts ORDER BY `service.name`, ts",
|
||||
Args: []any{"signoz_latency", uint64(1747936800000), uint64(1747983420000), "delta", false, "cartservice", "signoz_latency", uint64(1747947390000), uint64(1747983420000)},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "gauge_avg_sum",
|
||||
name: "test_gauge_avg_sum",
|
||||
requestType: qbtypes.RequestTypeTimeSeries,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
|
||||
Signal: telemetrytypes.SignalMetrics,
|
||||
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
|
||||
@@ -163,81 +172,51 @@ func TestStatementBuilder(t *testing.T) {
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "host.name = 'big-data-node-1'",
|
||||
},
|
||||
Limit: 10,
|
||||
GroupBy: []qbtypes.GroupByKey{
|
||||
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "host.name"}},
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "host.name",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
orderKey: "host.name",
|
||||
args: []any{"system.memory.usage", uint64(1747936800000), uint64(1747983420000), "unspecified", false, "big-data-node-1", "system.memory.usage", uint64(1747947390000), uint64(1747983420000), 0},
|
||||
cte: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `host.name`, avg(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'host.name') AS `host.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'host.name') = ? GROUP BY fingerprint, `host.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`)",
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `host.name`, avg(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'host.name') AS `host.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'host.name') = ? GROUP BY fingerprint, `host.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `host.name`, ts",
|
||||
Args: []any{"system.memory.usage", uint64(1747936800000), uint64(1747983420000), "unspecified", false, "big-data-node-1", "system.memory.usage", uint64(1747947390000), uint64(1747983420000), 0},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "test_histogram_percentile2",
|
||||
requestType: qbtypes.RequestTypeTimeSeries,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
|
||||
Signal: telemetrytypes.SignalMetrics,
|
||||
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
|
||||
Aggregations: []qbtypes.MetricAggregation{
|
||||
{
|
||||
MetricName: "http_server_duration_bucket",
|
||||
Type: metrictypes.HistogramType,
|
||||
Temporality: metrictypes.Cumulative,
|
||||
TimeAggregation: metrictypes.TimeAggregationRate,
|
||||
SpaceAggregation: metrictypes.SpaceAggregationPercentile95,
|
||||
},
|
||||
},
|
||||
Limit: 10,
|
||||
GroupBy: []qbtypes.GroupByKey{
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "service.name",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name`, `le` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts ORDER BY `service.name`, ts",
|
||||
Args: []any{"http_server_duration_bucket", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "http_server_duration_bucket", uint64(1747947360000), uint64(1747983420000), 0},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
}

type variant struct {
name string
limit int
hasOrder bool
}

variants := []variant{
{"with_limits", 10, false},
{"without_limits", 0, false},
{"with_order_by", 0, true},
{"with_order_by_and_limits", 10, true},
}

sumMetricsFinalSelects := map[string]string{
|
||||
"with_limits": " SELECT * FROM __spatial_aggregation_cte WHERE (`service.name`) IN (SELECT `service.name` FROM __spatial_aggregation_cte GROUP BY `service.name` ORDER BY avg(value) DESC LIMIT 10) ORDER BY avg(value) OVER (PARTITION BY `service.name`) DESC, `service.name`, ts ASC",
|
||||
"without_limits": " SELECT * FROM __spatial_aggregation_cte ORDER BY avg(value) OVER (PARTITION BY `service.name`) DESC, `service.name`, ts ASC",
|
||||
"with_order_by": " SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name` asc, ts ASC",
|
||||
"with_order_by_and_limits": " SELECT * FROM __spatial_aggregation_cte WHERE (`service.name`) IN (SELECT `service.name` FROM __spatial_aggregation_cte GROUP BY `service.name` ORDER BY `service.name` asc LIMIT 10) ORDER BY `service.name` asc, ts ASC",
|
||||
}
|
||||
|
||||
histogramMetricsFinalSelects := map[string]string{
|
||||
"with_limits": " SELECT * FROM __histogram_cte WHERE (`service.name`) IN (SELECT `service.name` FROM __histogram_cte GROUP BY `service.name` ORDER BY avg(value) DESC LIMIT 10) ORDER BY avg(value) OVER (PARTITION BY `service.name`) DESC, `service.name`, ts ASC",
|
||||
"without_limits": " SELECT * FROM __histogram_cte ORDER BY avg(value) OVER (PARTITION BY `service.name`) DESC, `service.name`, ts ASC",
|
||||
"with_order_by": " SELECT * FROM __histogram_cte ORDER BY `service.name` asc, ts ASC",
|
||||
"with_order_by_and_limits": " SELECT * FROM __histogram_cte WHERE (`service.name`) IN (SELECT `service.name` FROM __histogram_cte GROUP BY `service.name` ORDER BY `service.name` asc LIMIT 10) ORDER BY `service.name` asc, ts ASC",
|
||||
}
|
||||
|
||||
// expectedFinalSelects maps "base/variant" to the final SELECT portion after the CTE.
|
||||
// The full expected query is: base.cte + expectedFinalSelects[name]
|
||||
expectedFinalSelects := map[string]string{
|
||||
// cumulative_rate_sum
|
||||
"cumulative_rate_sum/with_limits": sumMetricsFinalSelects["with_limits"],
|
||||
"cumulative_rate_sum/without_limits": sumMetricsFinalSelects["without_limits"],
|
||||
"cumulative_rate_sum/with_order_by": sumMetricsFinalSelects["with_order_by"],
|
||||
"cumulative_rate_sum/with_order_by_and_limits": sumMetricsFinalSelects["with_order_by_and_limits"],
|
||||
|
||||
// cumulative_rate_sum_with_mat_column
|
||||
"cumulative_rate_sum_with_mat_column/with_limits": sumMetricsFinalSelects["with_limits"],
|
||||
"cumulative_rate_sum_with_mat_column/without_limits": sumMetricsFinalSelects["without_limits"],
|
||||
"cumulative_rate_sum_with_mat_column/with_order_by": sumMetricsFinalSelects["with_order_by"],
|
||||
"cumulative_rate_sum_with_mat_column/with_order_by_and_limits": sumMetricsFinalSelects["with_order_by_and_limits"],
|
||||
|
||||
// delta_rate_sum
|
||||
"delta_rate_sum/with_limits": sumMetricsFinalSelects["with_limits"],
|
||||
"delta_rate_sum/without_limits": sumMetricsFinalSelects["without_limits"],
|
||||
"delta_rate_sum/with_order_by": sumMetricsFinalSelects["with_order_by"],
|
||||
"delta_rate_sum/with_order_by_and_limits": sumMetricsFinalSelects["with_order_by_and_limits"],
|
||||
|
||||
// histogram_percentile1
|
||||
"histogram_percentile1/with_limits": histogramMetricsFinalSelects["with_limits"],
|
||||
"histogram_percentile1/without_limits": histogramMetricsFinalSelects["without_limits"],
|
||||
"histogram_percentile1/with_order_by": histogramMetricsFinalSelects["with_order_by"],
|
||||
"histogram_percentile1/with_order_by_and_limits": histogramMetricsFinalSelects["with_order_by_and_limits"],
|
||||
|
||||
// histogram_percentile2
|
||||
"histogram_percentile2/with_limits": histogramMetricsFinalSelects["with_limits"],
|
||||
"histogram_percentile2/without_limits": histogramMetricsFinalSelects["without_limits"],
|
||||
"histogram_percentile2/with_order_by": histogramMetricsFinalSelects["with_order_by"],
|
||||
"histogram_percentile2/with_order_by_and_limits": histogramMetricsFinalSelects["with_order_by_and_limits"],
|
||||
|
||||
// gauge_avg_sum
|
||||
"gauge_avg_sum/with_limits": " SELECT * FROM __spatial_aggregation_cte WHERE (`host.name`) IN (SELECT `host.name` FROM __spatial_aggregation_cte GROUP BY `host.name` ORDER BY avg(value) DESC LIMIT 10) ORDER BY avg(value) OVER (PARTITION BY `host.name`) DESC, `host.name`, ts ASC",
|
||||
"gauge_avg_sum/without_limits": " SELECT * FROM __spatial_aggregation_cte ORDER BY avg(value) OVER (PARTITION BY `host.name`) DESC, `host.name`, ts ASC",
|
||||
"gauge_avg_sum/with_order_by": " SELECT * FROM __spatial_aggregation_cte ORDER BY `host.name` asc, ts ASC",
|
||||
"gauge_avg_sum/with_order_by_and_limits": " SELECT * FROM __spatial_aggregation_cte WHERE (`host.name`) IN (SELECT `host.name` FROM __spatial_aggregation_cte GROUP BY `host.name` ORDER BY `host.name` asc LIMIT 10) ORDER BY `host.name` asc, ts ASC",
|
||||
}
|
||||
|
||||
fm := NewFieldMapper()
|
||||
@@ -248,13 +227,15 @@ func TestStatementBuilder(t *testing.T) {
|
||||
t.Fatalf("failed to load field keys: %v", err)
|
||||
}
|
||||
mockMetadataStore.KeysMap = keys
|
||||
// NOTE: LoadFieldKeysFromJSON doesn't set Materialized field
|
||||
// for keys, so we have to set it manually here for testing
|
||||
if _, ok := mockMetadataStore.KeysMap["materialized.key.name"]; ok {
|
||||
if len(mockMetadataStore.KeysMap["materialized.key.name"]) > 0 {
|
||||
mockMetadataStore.KeysMap["materialized.key.name"][0].Materialized = true
|
||||
}
|
||||
}
|
||||
|
||||
fl, err := flagger.New(context.Background(), instrumentationtest.New().ToProviderSettings(), flagger.Config{}, flagger.MustNewRegistry())
|
||||
flagger, err := flagger.New(context.Background(), instrumentationtest.New().ToProviderSettings(), flagger.Config{}, flagger.MustNewRegistry())
|
||||
if err != nil {
|
||||
t.Fatalf("failed to create flagger: %v", err)
|
||||
}
|
||||
@@ -264,30 +245,23 @@ func TestStatementBuilder(t *testing.T) {
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
fl,
|
||||
flagger,
|
||||
)
|
||||
|
||||
for _, b := range bases {
|
||||
for _, v := range variants {
|
||||
name := b.name + "/" + v.name
|
||||
t.Run(name, func(t *testing.T) {
|
||||
q := b.query
|
||||
q.Limit = v.limit
|
||||
if v.hasOrder {
|
||||
q.Order = []qbtypes.OrderBy{
|
||||
{
|
||||
Key: qbtypes.OrderByKey{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: b.orderKey}},
|
||||
Direction: qbtypes.OrderDirectionAsc,
|
||||
},
|
||||
}
|
||||
}
|
||||
for _, c := range cases {
|
||||
t.Run(c.name, func(t *testing.T) {
|
||||
|
||||
result, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, qbtypes.RequestTypeTimeSeries, q, nil)
|
||||
q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil)
|
||||
|
||||
if c.expectedErr != nil {
|
||||
require.Error(t, err)
|
||||
require.Contains(t, err.Error(), c.expectedErr.Error())
|
||||
} else {
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, b.cte+expectedFinalSelects[name], result.Query)
|
||||
require.Equal(t, b.args, result.Args)
|
||||
})
|
||||
}
|
||||
require.Equal(t, c.expected.Query, q.Query)
|
||||
require.Equal(t, c.expected.Args, q.Args)
|
||||
require.Equal(t, c.expected.Warnings, q.Warnings)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,34 +10,35 @@ import (
|
||||
type Account struct {
|
||||
types.Identifiable
|
||||
types.TimeAuditable
|
||||
ProviderAccountId *string `json:"providerAccountID,omitempty"`
|
||||
Provider CloudProviderType `json:"provider"`
|
||||
RemovedAt *time.Time `json:"removedAt,omitempty"`
|
||||
AgentReport *AgentReport `json:"agentReport,omitempty"`
|
||||
OrgID valuer.UUID `json:"orgID"`
|
||||
Config *AccountConfig `json:"config,omitempty"`
|
||||
ProviderAccountID *string `json:"providerAccountId" required:"true" nullable:"true"`
|
||||
Provider CloudProviderType `json:"provider" required:"true"`
|
||||
RemovedAt *time.Time `json:"removedAt,omitempty" required:"true" nullable:"true"`
|
||||
AgentReport *AgentReport `json:"agentReport,omitempty" required:"true" nullable:"true"`
|
||||
OrgID valuer.UUID `json:"orgId" required:"true"`
|
||||
Config *AccountConfig `json:"config,omitempty" required:"true" nullable:"false"`
|
||||
}
|
||||
|
||||
// AgentReport represents heartbeats sent by the agent.
|
||||
type AgentReport struct {
|
||||
TimestampMillis int64 `json:"timestampMillis"`
|
||||
Data map[string]any `json:"data"`
|
||||
TimestampMillis int64 `json:"timestampMillis" required:"true"`
|
||||
Data map[string]any `json:"data" required:"true" nullable:"true"`
|
||||
}
|
||||
|
||||
type AccountConfig struct {
|
||||
// required till new providers are added
|
||||
AWS *AWSAccountConfig `json:"aws,omitempty" required:"true" nullable:"false"`
|
||||
}
|
||||
|
||||
type GettableAccounts struct {
|
||||
Accounts []*Account `json:"accounts"`
|
||||
Accounts []*Account `json:"accounts" required:"true" nullable:"false"`
|
||||
}
|
||||
|
||||
type GettableAccount = Account
|
||||
|
||||
type UpdatableAccount struct {
|
||||
Config *AccountConfig `json:"config"`
|
||||
}
|
||||
|
||||
type AccountConfig struct {
|
||||
AWS *AWSAccountConfig `json:"aws,omitempty"`
|
||||
Config *AccountConfig `json:"config" required:"true" nullable:"false"`
|
||||
}
|
||||
|
||||
type AWSAccountConfig struct {
|
||||
Regions []string `json:"regions"`
|
||||
Regions []string `json:"regions" required:"true" nullable:"false"`
|
||||
}
|
||||
|
||||
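The `required` and `nullable` struct tags added to these account types are not standard encoding/json tags; they read like hints consumed by a schema/OpenAPI generator. As a hedged illustration only (not the project's actual generator, and using a stand-in struct), custom tags like these can be read with the standard reflect package:

package main

import (
	"fmt"
	"reflect"
)

// account is a minimal stand-in for the Account struct above,
// used only to demonstrate tag access.
type account struct {
	Provider string `json:"provider" required:"true"`
	Config   *int   `json:"config,omitempty" required:"true" nullable:"false"`
}

func main() {
	t := reflect.TypeOf(account{})
	for i := 0; i < t.NumField(); i++ {
		f := t.Field(i)
		// Tag.Get returns "" when the key is absent on a field.
		fmt.Printf("%s: json=%q required=%q nullable=%q\n",
			f.Name, f.Tag.Get("json"), f.Tag.Get("required"), f.Tag.Get("nullable"))
	}
}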
@@ -1,88 +1,74 @@
|
||||
package cloudintegrationtypes
|
||||
|
||||
import "github.com/SigNoz/signoz/pkg/types/integrationtypes"
|
||||
import "time"
|
||||
|
||||
type ConnectionArtifactRequest struct {
|
||||
Aws *AWSConnectionArtifactRequest `json:"aws"`
|
||||
// required till new providers are added
|
||||
Aws *AWSConnectionArtifactRequest `json:"aws" required:"true" nullable:"false"`
|
||||
}
|
||||
|
||||
type AWSConnectionArtifactRequest struct {
|
||||
DeploymentRegion string `json:"deploymentRegion"`
|
||||
Regions []string `json:"regions"`
|
||||
DeploymentRegion string `json:"deploymentRegion" required:"true"`
|
||||
Regions []string `json:"regions" required:"true" nullable:"false"`
|
||||
}
|
||||
|
||||
type PostableConnectionArtifact = ConnectionArtifactRequest
|
||||
|
||||
type ConnectionArtifact struct {
|
||||
Aws *AWSConnectionArtifact `json:"aws"`
|
||||
// required till new providers are added
|
||||
Aws *AWSConnectionArtifact `json:"aws" required:"true" nullable:"false"`
|
||||
}
|
||||
|
||||
type AWSConnectionArtifact struct {
|
||||
ConnectionUrl string `json:"connectionURL"`
|
||||
ConnectionURL string `json:"connectionURL" required:"true"`
|
||||
}
|
||||
|
||||
type GettableConnectionArtifact = ConnectionArtifact
|
||||
|
||||
type AccountStatus struct {
|
||||
Id string `json:"id"`
|
||||
ProviderAccountId *string `json:"providerAccountID,omitempty"`
|
||||
Status integrationtypes.AccountStatus `json:"status"`
|
||||
}
|
||||
|
||||
type GettableAccountStatus = AccountStatus
|
||||
|
||||
type AgentCheckInRequest struct {
|
||||
// older backward compatible fields are mapped to new fields
|
||||
// CloudIntegrationId string `json:"cloudIntegrationId"`
|
||||
// AccountId string `json:"accountId"`
|
||||
ProviderAccountID string `json:"providerAccountId" required:"false"`
|
||||
CloudIntegrationID string `json:"cloudIntegrationId" required:"false"`
|
||||
|
||||
// New fields
|
||||
ProviderAccountId string `json:"providerAccountId"`
|
||||
CloudAccountId string `json:"cloudAccountId"`
|
||||
|
||||
Data map[string]any `json:"data,omitempty"`
|
||||
Data map[string]any `json:"data,omitempty" required:"true" nullable:"true"`
|
||||
}
|
||||
|
||||
type PostableAgentCheckInRequest struct {
|
||||
AgentCheckInRequest
|
||||
// the following are backward-compatible fields for older running agents
|
||||
// which get mapped to new fields in AgentCheckInRequest
|
||||
CloudIntegrationId string `json:"cloud_integration_id"`
|
||||
CloudAccountId string `json:"cloud_account_id"`
|
||||
}
|
||||
|
||||
type GettableAgentCheckInResponse struct {
|
||||
AgentCheckInResponse
|
||||
|
||||
// For backward compatibility
|
||||
CloudIntegrationId string `json:"cloud_integration_id"`
|
||||
AccountId string `json:"account_id"`
|
||||
ID string `json:"account_id" required:"false"` // => CloudIntegrationID
|
||||
AccountID string `json:"cloud_account_id" required:"false"` // => ProviderAccountID
|
||||
}
|
||||
|
||||
type AgentCheckInResponse struct {
|
||||
// Older fields for backward compatibility are mapped to new fields below
|
||||
// CloudIntegrationId string `json:"cloud_integration_id"`
|
||||
// AccountId string `json:"account_id"`
|
||||
|
||||
// New fields
|
||||
ProviderAccountId string `json:"providerAccountId"`
|
||||
CloudAccountId string `json:"cloudAccountId"`
|
||||
|
||||
// IntegrationConfig carries the integration-related data required for an agent
|
||||
// to start collecting telemetry data
|
||||
// keeping JSON key snake_case for backward compatibility
|
||||
IntegrationConfig *IntegrationConfig `json:"integration_config,omitempty"`
|
||||
CloudIntegrationID string `json:"cloudIntegrationId" required:"true"`
|
||||
ProviderAccountID string `json:"providerAccountId" required:"true"`
|
||||
IntegrationConfig *ProviderIntegrationConfig `json:"integrationConfig" required:"true"`
|
||||
RemovedAt *time.Time `json:"removedAt" required:"true" nullable:"true"`
|
||||
}
|
||||
|
||||
type IntegrationConfig struct {
|
||||
EnabledRegions []string `json:"enabledRegions"` // backward compatible
|
||||
Telemetry *AWSCollectionStrategy `json:"telemetry,omitempty"` // backward compatible
|
||||
type GettableAgentCheckInResponse struct {
|
||||
// Older fields for backward compatibility with existing AWS agents
|
||||
AccountID string `json:"account_id" required:"true"`
|
||||
CloudAccountID string `json:"cloud_account_id" required:"true"`
|
||||
OlderIntegrationConfig *IntegrationConfig `json:"integration_config" required:"true" nullable:"true"`
|
||||
OlderRemovedAt *time.Time `json:"removed_at" required:"true" nullable:"true"`
|
||||
|
||||
// new fields
|
||||
AWS *AWSIntegrationConfig `json:"aws,omitempty"`
|
||||
AgentCheckInResponse
|
||||
}
|
||||
|
||||
// IntegrationConfig is the older integration config struct, kept for backward compatibility;
|
||||
// it will eventually be removed once agents are updated to use the new struct.
|
||||
type IntegrationConfig struct {
|
||||
EnabledRegions []string `json:"enabledRegions" required:"true" nullable:"false"` // backward compatible
|
||||
Telemetry *AWSCollectionStrategy `json:"telemetry,omitempty" required:"true" nullable:"false"` // backward compatible
|
||||
}
|
||||
|
||||
type ProviderIntegrationConfig struct {
|
||||
AWS *AWSIntegrationConfig `json:"aws,omitempty" required:"true" nullable:"false"`
|
||||
}
|
||||
|
||||
type AWSIntegrationConfig struct {
|
||||
EnabledRegions []string `json:"enabledRegions"`
|
||||
Telemetry *AWSCollectionStrategy `json:"telemetry,omitempty"`
|
||||
EnabledRegions []string `json:"enabledRegions" required:"true" nullable:"false"`
|
||||
Telemetry *AWSCollectionStrategy `json:"telemetry,omitempty" required:"true" nullable:"false"`
|
||||
}
|
||||
|
||||
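As a purely hypothetical sketch (all values are placeholders, not taken from the source), the new-style check-in response defined above could be populated like this:

resp := AgentCheckInResponse{
	CloudIntegrationID: "0b1c2d3e-placeholder", // placeholder integration id
	ProviderAccountID:  "123456789012",         // placeholder AWS account id
	IntegrationConfig: &ProviderIntegrationConfig{
		AWS: &AWSIntegrationConfig{
			EnabledRegions: []string{"us-east-1"},
			// Telemetry is left nil in this sketch
		},
	},
	RemovedAt: nil, // account is still connected
}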
@@ -11,19 +11,20 @@ import (
|
||||
)
|
||||
|
||||
var (
|
||||
S3Sync = valuer.NewString("s3sync")
|
||||
// ErrCodeInvalidServiceID is the error code for invalid service id.
|
||||
ErrCodeInvalidServiceID = errors.MustNewCode("invalid_service_id")
|
||||
)
|
||||
|
||||
type ServiceID struct{ valuer.String }
|
||||
|
||||
type CloudIntegrationService struct {
|
||||
types.Identifiable
|
||||
types.TimeAuditable
|
||||
Type ServiceID `json:"type"`
|
||||
Config *ServiceConfig `json:"config"`
|
||||
CloudIntegrationID valuer.UUID `json:"cloudIntegrationID"`
|
||||
CloudIntegrationID valuer.UUID `json:"cloudIntegrationId"`
|
||||
}
|
||||
|
||||
type ServiceConfig struct {
|
||||
// required till new providers are added
|
||||
AWS *AWSServiceConfig `json:"aws,omitempty" required:"true" nullable:"false"`
|
||||
}
|
||||
|
||||
// ServiceMetadata helps to quickly list available services and whether each one is enabled.
|
||||
@@ -32,26 +33,56 @@ type CloudIntegrationService struct {
|
||||
type ServiceMetadata struct {
|
||||
ServiceDefinitionMetadata
|
||||
// if the service is enabled for the account
|
||||
Enabled bool `json:"enabled"`
|
||||
Enabled bool `json:"enabled" required:"true"`
|
||||
}
|
||||
|
||||
// ServiceDefinitionMetadata represents service definition metadata. This is useful for showing the services tab in the frontend.
|
||||
type ServiceDefinitionMetadata struct {
|
||||
Id string `json:"id" required:"true"`
|
||||
Title string `json:"title" required:"true"`
|
||||
Icon string `json:"icon" required:"true"`
|
||||
}
|
||||
|
||||
type GettableServicesMetadata struct {
|
||||
Services []*ServiceMetadata `json:"services"`
|
||||
Services []*ServiceMetadata `json:"services" required:"true" nullable:"false"`
|
||||
}
|
||||
|
||||
type Service struct {
|
||||
ServiceDefinition
|
||||
ServiceConfig *ServiceConfig `json:"serviceConfig"`
|
||||
ServiceConfig *ServiceConfig `json:"serviceConfig" required:"false" nullable:"false"`
|
||||
}
|
||||
|
||||
type GettableService = Service
|
||||
|
||||
type UpdatableService struct {
|
||||
Config *ServiceConfig `json:"config"`
|
||||
Config *ServiceConfig `json:"config" required:"true" nullable:"false"`
|
||||
}
|
||||
|
||||
type ServiceConfig struct {
|
||||
AWS *AWSServiceConfig `json:"aws,omitempty"`
|
||||
type ServiceDefinition struct {
|
||||
ServiceDefinitionMetadata
|
||||
Overview string `json:"overview" required:"true"` // markdown
|
||||
Assets Assets `json:"assets" required:"true"`
|
||||
SupportedSignals SupportedSignals `json:"supported_signals" required:"true"`
|
||||
DataCollected DataCollected `json:"dataCollected" required:"true"`
|
||||
Strategy *CollectionStrategy `json:"telemetryCollectionStrategy" required:"true" nullable:"false"`
|
||||
}
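For orientation, here is a hedged example of a ServiceDefinition literal built only from the fields shown above; the title, icon path, and overview text are placeholders, not real service metadata:

def := ServiceDefinition{
	ServiceDefinitionMetadata: ServiceDefinitionMetadata{
		Id:    "rds",
		Title: "Amazon RDS",         // placeholder title
		Icon:  "/Logos/aws-rds.svg", // placeholder icon path
	},
	Overview:         "Collect RDS metrics and logs.", // markdown, placeholder
	SupportedSignals: SupportedSignals{Logs: true, Metrics: true},
	// Assets, DataCollected, and Strategy are omitted in this sketch.
}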
|
||||
|
||||
// SupportedSignals for cloud provider's service.
|
||||
type SupportedSignals struct {
|
||||
Logs bool `json:"logs"`
|
||||
Metrics bool `json:"metrics"`
|
||||
}
|
||||
|
||||
// DataCollected is a curated static list of metrics and logs, shown as part of the service overview.
|
||||
type DataCollected struct {
|
||||
Logs []CollectedLogAttribute `json:"logs"`
|
||||
Metrics []CollectedMetric `json:"metrics"`
|
||||
}
|
||||
|
||||
// CollectionStrategy is cloud-provider-specific configuration for signal collection;
|
||||
// it is used by the agent to understand how to collect telemetry for the cloud provider.
|
||||
type CollectionStrategy struct {
|
||||
AWS *AWSCollectionStrategy `json:"aws,omitempty"`
|
||||
}
|
||||
|
||||
type AWSServiceConfig struct {
|
||||
@@ -70,45 +101,11 @@ type AWSServiceMetricsConfig struct {
|
||||
Enabled bool `json:"enabled"`
|
||||
}
|
||||
|
||||
// ServiceDefinitionMetadata represents service definition metadata. This is useful for showing service tab in frontend.
|
||||
type ServiceDefinitionMetadata struct {
|
||||
Id string `json:"id"`
|
||||
Title string `json:"title"`
|
||||
Icon string `json:"icon"`
|
||||
}
|
||||
|
||||
type ServiceDefinition struct {
|
||||
ServiceDefinitionMetadata
|
||||
Overview string `json:"overview"` // markdown
|
||||
Assets Assets `json:"assets"`
|
||||
SupportedSignals SupportedSignals `json:"supported_signals"`
|
||||
DataCollected DataCollected `json:"dataCollected"`
|
||||
Strategy *CollectionStrategy `json:"telemetryCollectionStrategy"`
|
||||
}
|
||||
|
||||
// CollectionStrategy is cloud provider specific configuration for signal collection,
|
||||
// this is used by agent to understand the nitty-gritty for collecting telemetry for the cloud provider.
|
||||
type CollectionStrategy struct {
|
||||
AWS *AWSCollectionStrategy `json:"aws,omitempty"`
|
||||
}
|
||||
|
||||
// Assets represents the collection of dashboards.
|
||||
type Assets struct {
|
||||
Dashboards []Dashboard `json:"dashboards"`
|
||||
}
|
||||
|
||||
// SupportedSignals for cloud provider's service.
|
||||
type SupportedSignals struct {
|
||||
Logs bool `json:"logs"`
|
||||
Metrics bool `json:"metrics"`
|
||||
}
|
||||
|
||||
// DataCollected is curated static list of metrics and logs, this is shown as part of service overview.
|
||||
type DataCollected struct {
|
||||
Logs []CollectedLogAttribute `json:"logs"`
|
||||
Metrics []CollectedMetric `json:"metrics"`
|
||||
}
|
||||
|
||||
// CollectedLogAttribute represents a log attribute that is present in all log entries for a service,
|
||||
// this is shown as part of service overview.
|
||||
type CollectedLogAttribute struct {
|
||||
@@ -175,39 +172,6 @@ type Dashboard struct {
|
||||
Definition dashboardtypes.StorableDashboardData `json:"definition,omitempty"`
|
||||
}
|
||||
|
||||
// SupportedServices is the map of supported services for each cloud provider.
|
||||
var SupportedServices = map[CloudProviderType][]ServiceID{
|
||||
CloudProviderTypeAWS: {
|
||||
{valuer.NewString("alb")},
|
||||
{valuer.NewString("api-gateway")},
|
||||
{valuer.NewString("dynamodb")},
|
||||
{valuer.NewString("ec2")},
|
||||
{valuer.NewString("ecs")},
|
||||
{valuer.NewString("eks")},
|
||||
{valuer.NewString("elasticache")},
|
||||
{valuer.NewString("lambda")},
|
||||
{valuer.NewString("msk")},
|
||||
{valuer.NewString("rds")},
|
||||
{valuer.NewString("s3sync")},
|
||||
{valuer.NewString("sns")},
|
||||
{valuer.NewString("sqs")},
|
||||
},
|
||||
}
|
||||
|
||||
// NewServiceID returns a new ServiceID from a string, validated against the supported services for the given cloud provider.
|
||||
func NewServiceID(provider CloudProviderType, service string) (ServiceID, error) {
|
||||
services, ok := SupportedServices[provider]
|
||||
if !ok {
|
||||
return ServiceID{}, errors.NewInvalidInputf(ErrCodeInvalidServiceID, "no services defined for cloud provider: %s", provider)
|
||||
}
|
||||
for _, s := range services {
|
||||
if s.StringValue() == service {
|
||||
return s, nil
|
||||
}
|
||||
}
|
||||
return ServiceID{}, errors.NewInvalidInputf(ErrCodeInvalidServiceID, "invalid service id %q for cloud provider %s", service, provider)
|
||||
}
|
||||
|
||||
// UTILS
|
||||
|
||||
// GetCloudIntegrationDashboardID returns the dashboard id for a cloud integration, given the cloud provider, service id, and dashboard id.
|
||||
|
||||
75
pkg/types/cloudintegrationtypes/serviceid.go
Normal file
@@ -0,0 +1,75 @@
package cloudintegrationtypes

import (
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/valuer"
)

type ServiceID struct{ valuer.String }

var (
	AWSServiceALB         = ServiceID{valuer.NewString("alb")}
	AWSServiceAPIGateway  = ServiceID{valuer.NewString("api-gateway")}
	AWSServiceDynamoDB    = ServiceID{valuer.NewString("dynamodb")}
	AWSServiceEC2         = ServiceID{valuer.NewString("ec2")}
	AWSServiceECS         = ServiceID{valuer.NewString("ecs")}
	AWSServiceEKS         = ServiceID{valuer.NewString("eks")}
	AWSServiceElastiCache = ServiceID{valuer.NewString("elasticache")}
	AWSServiceLambda      = ServiceID{valuer.NewString("lambda")}
	AWSServiceMSK         = ServiceID{valuer.NewString("msk")}
	AWSServiceRDS         = ServiceID{valuer.NewString("rds")}
	AWSServiceS3Sync      = ServiceID{valuer.NewString("s3sync")}
	AWSServiceSNS         = ServiceID{valuer.NewString("sns")}
	AWSServiceSQS         = ServiceID{valuer.NewString("sqs")}
)

func (ServiceID) Enum() []any {
	return []any{
		AWSServiceALB,
		AWSServiceAPIGateway,
		AWSServiceDynamoDB,
		AWSServiceEC2,
		AWSServiceECS,
		AWSServiceEKS,
		AWSServiceElastiCache,
		AWSServiceLambda,
		AWSServiceMSK,
		AWSServiceRDS,
		AWSServiceS3Sync,
		AWSServiceSNS,
		AWSServiceSQS,
	}
}

// SupportedServices is the map of supported services for each cloud provider.
var SupportedServices = map[CloudProviderType][]ServiceID{
	CloudProviderTypeAWS: {
		AWSServiceALB,
		AWSServiceAPIGateway,
		AWSServiceDynamoDB,
		AWSServiceEC2,
		AWSServiceECS,
		AWSServiceEKS,
		AWSServiceElastiCache,
		AWSServiceLambda,
		AWSServiceMSK,
		AWSServiceRDS,
		AWSServiceS3Sync,
		AWSServiceSNS,
		AWSServiceSQS,
	},
}

// NewServiceID returns a new ServiceID from a string, validated against the supported services for the given cloud provider.
func NewServiceID(provider CloudProviderType, service string) (ServiceID, error) {
	services, ok := SupportedServices[provider]
	if !ok {
		return ServiceID{}, errors.NewInvalidInputf(ErrCodeInvalidServiceID, "no services defined for cloud provider: %s", provider)
	}
	for _, s := range services {
		if s.StringValue() == service {
			return s, nil
		}
	}
	return ServiceID{}, errors.NewInvalidInputf(ErrCodeInvalidServiceID, "invalid service id %q for cloud provider %s", service, provider)
}
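A short usage sketch for NewServiceID (hypothetical call site; assumes fmt is imported and error handling is abbreviated):

svc, err := NewServiceID(CloudProviderTypeAWS, "rds")
if err != nil {
	// returned only when the id is not listed in SupportedServices for the provider
	panic(err)
}
fmt.Println(svc.StringValue()) // prints: rds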