Compare commits


6 Commits

Author | SHA1 | Message | Date
SagarRajput-7 | 9ad82585d0 | feat: refactored the api query hooks usage and other refactoring | 2026-02-09 10:02:46 +05:30
SagarRajput-7 | 53544eca90 | Merge branch 'main' into gateway-api-keys | 2026-02-09 07:15:14 +05:30
SagarRajput-7 | 123ca4726b | Merge branch 'main' into gateway-api-keys | 2026-02-06 14:01:17 +05:30
SagarRajput-7 | 65dcd1e727 | feat: updated test case and refactored code | 2026-02-06 11:47:39 +05:30
SagarRajput-7 | 631550b7f5 | Merge branch 'main' into gateway-api-keys | 2026-02-06 11:01:19 +05:30
SagarRajput-7 | de50581256 | feat: upgraded the ingestion gateway apis | 2026-02-05 17:41:33 +05:30
29 changed files with 884 additions and 2671 deletions

File diff suppressed because it is too large

View File

@@ -9,8 +9,8 @@ import (
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/http/middleware"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
@@ -22,8 +22,6 @@ import (
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/version"
"github.com/gorilla/mux"
)
@@ -58,6 +56,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
FluxInterval: opts.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
@@ -109,22 +108,7 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)
// v5
router.Handle("/api/v5/query_range", handler.New(am.ViewAccess(ah.queryRangeV5), handler.OpenAPIDef{
ID: "QueryRangeV5",
Tags: []string{"query"},
Summary: "Query range",
Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, joins, trace operators, PromQL, and ClickHouse SQL.",
Request: new(qbtypes.QueryRangeRequest),
RequestContentType: "application/json",
Response: new(qbtypes.QueryRangeResponse),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest},
SecuritySchemes: []handler.OpenAPISecurityScheme{
{Name: ctxtypes.AuthTypeAPIKey.StringValue(), Scopes: []string{"VIEWER"}},
{Name: ctxtypes.AuthTypeTokenizer.StringValue(), Scopes: []string{"VIEWER"}},
},
})).Methods(http.MethodPost)
router.HandleFunc("/api/v5/query_range", am.ViewAccess(ah.queryRangeV5)).Methods(http.MethodPost)
router.HandleFunc("/api/v5/substitute_vars", am.ViewAccess(ah.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)

View File

@@ -237,6 +237,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
apiHandler.RegisterLogsRoutes(r, am)
apiHandler.RegisterIntegrationRoutes(r, am)
apiHandler.RegisterCloudIntegrationsRoutes(r, am)
apiHandler.RegisterFieldsRoutes(r, am)
apiHandler.RegisterQueryRangeV3Routes(r, am)
apiHandler.RegisterInfraMetricsRoutes(r, am)
apiHandler.RegisterQueryRangeV4Routes(r, am)

View File

@@ -34,230 +34,159 @@ const themeColors = {
cyan: '#00FFFF',
},
chartcolors: {
// Blues (3)
dodgerBlue: '#2F80ED',
royalBlue: '#3366E6',
steelBlue: '#4682B4',
// Teals / Cyans (3)
turquoise: '#00CEC9',
lagoon: '#1ABC9C',
cyanBright: '#22A6F2',
// Greens (3)
emeraldGreen: '#27AE60',
mediumSeaGreen: '#3CB371',
limeGreen: '#A3E635',
// Yellows / Golds (3)
festivalYellow: '#F2C94C',
sunflower: '#FFD93D',
warmAmber: '#FFCA28',
// Oranges (3)
festivalOrange: '#F2994A',
coralOrange: '#E17055',
pumpkin: '#FF7F50',
// Reds (3)
radicalRed: '#FF1A66',
crimsonRed: '#EB5757',
fireRed: '#E10600',
// Pinks (3)
hotPink: '#E84393',
rosePink: '#FD79A8',
blush: '#FF7EB6',
// Purples / Violets (3)
mediumPurple: '#BB6BD9',
royalPurple: '#9B51E0',
orchid: '#DA77F2',
// Accent / Neon / Unique Colors (3)
neonViolet: '#C77DFF',
electricPurple: '#6C5CE7',
arcticBlue: '#48DBFB',
// Extended palette — systematic variations to reach 100 colors
blue1: '#1F63E0',
blue2: '#3A7AED',
blue3: '#5A9DF5',
cyan1: '#00B0AA',
cyan2: '#33D6C2',
cyan3: '#66E9DA',
green1: '#1E8449',
green2: '#2ECC71',
green3: '#58D68D',
yellow1: '#F1C40F',
yellow2: '#F7DC6F',
yellow3: '#F9E79F',
orange1: '#D35400',
orange2: '#E67E22',
orange3: '#F5B041',
red1: '#C0392B',
red2: '#E74C3C',
red3: '#EC7063',
pink1: '#D81B60',
pink2: '#E91E63',
pink3: '#F06292',
purple1: '#8E44AD',
purple2: '#9B59B6',
purple3: '#BB8FCE',
teal1: '#009688',
teal2: '#1ABC9C',
teal3: '#48C9B0',
lime1: '#A3E635',
lime2: '#B9F18D',
lime3: '#D4FFB0',
gold1: '#F39C12',
gold2: '#F1C40F',
gold3: '#F7DC6F',
coral1: '#E67E22',
coral2: '#F39C12',
coral3: '#F5B041',
crimson1: '#C0392B',
crimson2: '#E74C3C',
crimson3: '#EC7063',
violet1: '#8E44AD',
violet2: '#9B59B6',
violet3: '#BB8FCE',
aqua1: '#00BFFF',
aqua2: '#1E90FF',
aqua3: '#63B8FF',
forest1: '#27AE60',
forest2: '#2ECC71',
forest3: '#58D68D',
blush1: '#FF6F91',
blush2: '#FF85A2',
blush3: '#FFA0B3',
lavender1: '#9B59B6',
lavender2: '#AF7AC5',
lavender3: '#C39BD3',
tomato1: '#E74C3C',
tomato2: '#EC7063',
tomato3: '#F1948A',
salmon1: '#FF6B6B',
salmon2: '#FF8787',
salmon3: '#FFA1A1',
mustard1: '#F1C40F',
mustard2: '#F7DC6F',
mustard3: '#F9E79F',
teal4: '#1ABC9C',
teal5: '#48C9B0',
teal6: '#76D7C4',
magenta1: '#D6336C',
magenta2: '#E84393',
magenta3: '#F06292',
violet4: '#7D3C98',
violet5: '#8E44AD',
violet6: '#9B59B6',
green4: '#229954',
green5: '#27AE60',
green6: '#52BE80',
blue4: '#2874A6',
blue5: '#2E86C1',
blue6: '#3498DB',
red4: '#C0392B',
red5: '#E74C3C',
red6: '#EC7063',
orange4: '#D35400',
orange5: '#E67E22',
orange6: '#EB984E',
pink4: '#C2185B',
pink5: '#D81B60',
pink6: '#E91E63',
gold4: '#B7950B',
gold5: '#F1C40F',
gold6: '#F4D03F',
dodgerBlue: '#2F80ED',
mediumOrchid: '#BB6BD9',
seaBuckthorn: '#F2994A',
seaGreen: '#219653',
turquoiseBlue: '#56CCF2',
festivalOrange: '#F2C94C',
silver: '#BDBDBD',
outrageousOrange: '#FF6633',
roseBud: '#FFB399',
canary: '#FFFF99',
deepSkyBlue: '#00B3E6',
goldTips: '#E6B333',
royalBlue: '#3366E6',
avocado: '#999966',
mintGreen: '#99FF99',
chestnut: '#B34D4D',
lima: '#80B300',
olive: '#809900',
beautyBush: '#E6B3B3',
danube: '#6680B3',
oliveDrab: '#66991A',
lavenderRose: '#FF99E6',
electricLime: '#CCFF1A',
robin: '#3F5ECC',
harleyOrange: '#E6331A',
turquoise: '#33FFCC',
gladeGreen: '#66994D',
hemlock: '#66664D',
vidaLoca: '#4D8000',
rust: '#B33300',
red: '#FF0000', // Adding more colors, we need to get better colors from design team
blue: '#0000FF',
green: '#00FF00',
yellow: '#FFFF00',
purple: '#800080',
cyan: '#00FFFF',
magenta: '#FF00FF',
orange: '#FFA500',
pink: '#FFC0CB',
brown: '#A52A2A',
teal: '#008080',
lime: '#00FF00',
maroon: '#800000',
navy: '#000080',
aquamarine: '#7FFFD4',
darkSeaGreen: '#8FBC8F',
gray: '#808080',
skyBlue: '#87CEEB',
indigo: '#4B0082',
slateGray: '#708090',
chocolate: '#D2691E',
tomato: '#FF6347',
steelBlue: '#4682B4',
peru: '#CD853F',
darkOliveGreen: '#556B2F',
indianRed: '#CD5C5C',
mediumSlateBlue: '#7B68EE',
rosyBrown: '#BC8F8F',
darkSlateGray: '#2F4F4F',
mediumAquamarine: '#66CDAA',
lavender: '#E6E6FA',
thistle: '#D8BFD8',
salmon: '#FA8072',
darkSalmon: '#E9967A',
paleVioletRed: '#DB7093',
mediumPurple: '#9370DB',
darkOrchid: '#9932CC',
lawnGreen: '#7CFC00',
mediumSeaGreen: '#3CB371',
lightCoral: '#F08080',
gold: '#FFD700',
sandyBrown: '#F4A460',
darkKhaki: '#BDB76B',
cornflowerBlue: '#6495ED',
mediumVioletRed: '#C71585',
paleGreen: '#98FB98',
},
lightModeColor: {
radicalRed: '#D81B60',
dodgerBlueDark: '#1E5BD9',
steelgrey: '#344B6B',
steelpurple: '#5E548E',
steelindigo: '#8E4A7C',
steelpink: '#B63A6F',
steelcoral: '#E14B5A',
steelorange: '#E76F2F',
steelgold: '#E09B00',
steelrust: '#C93A50',
steelgreen: '#2F7D69',
mediumOrchidDark: '#8E24AA',
seaBuckthornDark: '#C75A00',
seaGreen: '#1E7F5A',
turquoiseBlueDark: '#007EA7',
silverDark: '#5F5F5F',
outrageousOrangeDark: '#E64A19',
roseBudDark: '#D84315',
deepSkyBlueDark: '#0277BD',
royalBlue: '#2A4FDB',
avocadoDark: '#6B6B1E',
mintGreenDark: '#2E9E55',
chestnut: '#8B3A3A',
limaDark: '#5C7F00',
olive: '#6E7F00',
beautyBushDark: '#C93C3C',
danube: '#4F6FB3',
oliveDrab: '#4F7F1A',
lavenderRoseDark: '#B0178F',
electricLimeDark: '#6B8F00',
robin: '#2F4FCC',
harleyOrange: '#CC2E12',
gladeGreen: '#4F7F46',
hemlock: '#5C5C45',
vidaLoca: '#3D6B00',
rust: '#993300',
red: '#C62828',
blue: '#1A237E',
green: '#1B7F3A',
purple: '#6A1B9A',
magentaDark: '#B000B5',
pinkDark: '#C2185B',
brown: '#7A3A1E',
teal: '#006D6F',
limeDark: '#4C8C2B',
maroon: '#6D1B1B',
navy: '#0D1B5E',
gray: '#616161',
skyBlueDark: '#0288D1',
indigo: '#303F9F',
slateGray: '#556B7C',
chocolate: '#9C4A1A',
tomato: '#E53935',
steelBlue: '#3A6EA5',
peruDark: '#B35E00',
darkOliveGreen: '#445B1F',
indianRed: '#B04040',
mediumSlateBlue: '#5C6BC0',
rosyBrownDark: '#A94444',
darkSlateGray: '#2E4A4A',
fuchsia: '#C511C5',
salmonDark: '#E64A3C',
darkSalmonDark: '#C85A3A',
paleVioletRedDark: '#C2186A',
mediumPurple: '#7E57C2',
darkOrchid: '#7B1FA2',
mediumSeaGreenDark: '#2E8B57',
lightCoralDark: '#E57373',
gold: '#D4AF37',
sandyBrownDark: '#C76A15',
darkKhakiDark: '#8A7F00',
cornflowerBlueDark: '#355FCC',
mediumVioletRed: '#AD1457',
paleGreenDark: '#2E7D32',
radicalRed: '#FF1A66',
dodgerBlueDark: '#0C6EED',
steelgrey: '#2f4b7c',
steelpurple: '#665191',
steelindigo: '#a05195',
steelpink: '#d45087',
steelcoral: '#f95d6a',
steelorange: '#ff7c43',
steelgold: '#ffa600',
steelrust: '#de425b',
steelgreen: '#41967e',
mediumOrchidDark: '#C326FD',
seaBuckthornDark: '#E66E05',
seaGreen: '#219653',
turquoiseBlueDark: '#0099CC',
silverDark: '#757575',
outrageousOrangeDark: '#F9521A',
roseBudDark: '#EB6437',
deepSkyBlueDark: '#0595BD',
royalBlue: '#3366E6',
avocadoDark: '#8E8E29',
mintGreenDark: '#00C700',
chestnut: '#B34D4D',
limaDark: '#6E9900',
olive: '#809900',
beautyBushDark: '#E25555',
danube: '#6680B3',
oliveDrab: '#66991A',
lavenderRoseDark: '#F024BD',
electricLimeDark: '#84A800',
robin: '#3F5ECC',
harleyOrange: '#E6331A',
gladeGreen: '#66994D',
hemlock: '#66664D',
vidaLoca: '#4D8000',
rust: '#B33300',
red: '#FF0000', // Adding more colors, we need to get better colors from design team
blue: '#0000FF',
green: '#00FF00',
purple: '#800080',
magentaDark: '#EB00EB',
pinkDark: '#FF3D5E',
brown: '#A52A2A',
teal: '#008080',
limeDark: '#07A207',
maroon: '#800000',
navy: '#000080',
gray: '#808080',
skyBlueDark: '#0CA7E4',
indigo: '#4B0082',
slateGray: '#708090',
chocolate: '#D2691E',
tomato: '#FF6347',
steelBlue: '#4682B4',
peruDark: '#D16E0A',
darkOliveGreen: '#556B2F',
indianRed: '#CD5C5C',
mediumSlateBlue: '#7B68EE',
rosyBrownDark: '#CB4848',
darkSlateGray: '#2F4F4F',
fuchsia: '#FF0AFF',
salmonDark: '#FF432E',
darkSalmonDark: '#D26541',
paleVioletRedDark: '#E83089',
mediumPurple: '#9370DB',
darkOrchid: '#9932CC',
mediumSeaGreenDark: '#109E50',
lightCoralDark: '#F85959',
gold: '#FFD700',
sandyBrownDark: '#D97117',
darkKhakiDark: '#99900A',
cornflowerBlueDark: '#3371E6',
mediumVioletRed: '#C71585',
paleGreenDark: '#0D910D',
},
errorColor: '#d32f2f',
royalGrey: '#888888',

View File

@@ -2,7 +2,6 @@
/* eslint-disable jsx-a11y/click-events-have-key-events */
import { ChangeEvent, useCallback, useEffect, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useMutation } from 'react-query';
import { useHistory } from 'react-router-dom';
import { useCopyToClipboard } from 'react-use';
import { Color } from '@signozhq/design-tokens';
@@ -27,12 +26,20 @@ import {
} from 'antd';
import { NotificationInstance } from 'antd/es/notification/interface';
import { CollapseProps } from 'antd/lib';
import createIngestionKeyApi from 'api/IngestionKeys/createIngestionKey';
import deleteIngestionKey from 'api/IngestionKeys/deleteIngestionKey';
import createLimitForIngestionKeyApi from 'api/IngestionKeys/limits/createLimitsForKey';
import deleteLimitsForIngestionKey from 'api/IngestionKeys/limits/deleteLimitsForIngestionKey';
import updateLimitForIngestionKeyApi from 'api/IngestionKeys/limits/updateLimitsForIngestionKey';
import updateIngestionKey from 'api/IngestionKeys/updateIngestionKey';
import {
useCreateIngestionKey,
useCreateIngestionKeyLimit,
useDeleteIngestionKey,
useDeleteIngestionKeyLimit,
useGetIngestionKeys,
useSearchIngestionKeys,
useUpdateIngestionKey,
useUpdateIngestionKeyLimit,
} from 'api/generated/services/gateway';
import {
GatewaytypesIngestionKeyDTO,
RenderErrorResponseDTO,
} from 'api/generated/services/sigNoz.schemas';
import { AxiosError } from 'axios';
import { getYAxisFormattedValue } from 'components/Graph/yAxisConfig';
import Tags from 'components/Tags/Tags';
@@ -44,7 +51,6 @@ import ROUTES from 'constants/routes';
import { INITIAL_ALERT_THRESHOLD_STATE } from 'container/CreateAlertV2/context/constants';
import dayjs from 'dayjs';
import { useGetGlobalConfig } from 'hooks/globalConfig/useGetGlobalConfig';
import { useGetAllIngestionsKeys } from 'hooks/IngestionKeys/useGetAllIngestionKeys';
import useDebouncedFn from 'hooks/useDebouncedFunction';
import { useNotifications } from 'hooks/useNotifications';
import { cloneDeep, isNil, isUndefined } from 'lodash-es';
@@ -66,16 +72,12 @@ import {
} from 'lucide-react';
import { useAppContext } from 'providers/App/App';
import { useTimezone } from 'providers/Timezone';
import { ErrorResponse } from 'types/api';
import {
AddLimitProps,
LimitProps,
UpdateLimitProps,
} from 'types/api/ingestionKeys/limits/types';
import {
IngestionKeyProps,
PaginationProps,
} from 'types/api/ingestionKeys/types';
import { PaginationProps } from 'types/api/ingestionKeys/types';
import { MeterAggregateOperator } from 'types/common/queryBuilder';
import { USER_ROLES } from 'types/roles';
import { getDaysUntilExpiry } from 'utils/timeUtils';
@@ -86,6 +88,10 @@ const { Option } = Select;
const BYTES = 1073741824;
const INITIAL_PAGE_SIZE = 10;
const SEARCH_PAGE_SIZE = 100;
const FIRST_PAGE = 1;
const COUNT_MULTIPLIER = {
thousand: 1000,
million: 1000000,
@@ -111,6 +117,8 @@ export const showErrorNotification = (
): void => {
notifications.error({
message: err.message || SOMETHING_WENT_WRONG,
description: (err as AxiosError<RenderErrorResponseDTO>).response?.data?.error
?.message,
});
};
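Note: the message lookup above assumes the generated RenderErrorResponseDTO nests the server message under error.message. A minimal sketch of that assumed shape (the real type is code-generated into api/generated/services/sigNoz.schemas and may carry more fields):

// Assumed shape only — illustrative, not the generated source.
interface RenderErrorResponseDTOSketch {
	error?: {
		message?: string;
	};
}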
@@ -163,15 +171,20 @@ function MultiIngestionSettings(): JSX.Element {
const [updatedTags, setUpdatedTags] = useState<string[]>([]);
const [isEditModalOpen, setIsEditModalOpen] = useState(false);
const [isEditAddLimitOpen, setIsEditAddLimitOpen] = useState(false);
const [activeAPIKey, setActiveAPIKey] = useState<IngestionKeyProps | null>();
const [
activeAPIKey,
setActiveAPIKey,
] = useState<GatewaytypesIngestionKeyDTO | null>();
const [activeSignal, setActiveSignal] = useState<LimitProps | null>(null);
const [searchValue, setSearchValue] = useState<string>('');
const [searchText, setSearchText] = useState<string>('');
const [dataSource, setDataSource] = useState<IngestionKeyProps[]>([]);
const [dataSource, setDataSource] = useState<GatewaytypesIngestionKeyDTO[]>(
[],
);
const [paginationParams, setPaginationParams] = useState<PaginationProps>({
page: 1,
per_page: 10,
page: FIRST_PAGE,
per_page: INITIAL_PAGE_SIZE,
});
const [totalIngestionKeys, setTotalIngestionKeys] = useState(0);
@@ -186,7 +199,7 @@ function MultiIngestionSettings(): JSX.Element {
const [
createLimitForIngestionKeyError,
setCreateLimitForIngestionKeyError,
] = useState<ErrorResponse | null>(null);
] = useState<string | null>(null);
const [
hasUpdateLimitForIngestionKeyError,
@@ -196,7 +209,7 @@ function MultiIngestionSettings(): JSX.Element {
const [
updateLimitForIngestionKeyError,
setUpdateLimitForIngestionKeyError,
] = useState<ErrorResponse | null>(null);
] = useState<string | null>(null);
const { t } = useTranslation(['ingestionKeys']);
@@ -216,7 +229,11 @@ function MultiIngestionSettings(): JSX.Element {
handleFormReset();
};
const showDeleteModal = (apiKey: IngestionKeyProps): void => {
const showDeleteModal = (apiKey: GatewaytypesIngestionKeyDTO): void => {
setHasCreateLimitForIngestionKeyError(false);
setCreateLimitForIngestionKeyError(null);
setHasUpdateLimitForIngestionKeyError(false);
setUpdateLimitForIngestionKeyError(null);
setActiveAPIKey(apiKey);
setIsDeleteModalOpen(true);
};
@@ -233,7 +250,11 @@ function MultiIngestionSettings(): JSX.Element {
setIsAddModalOpen(false);
};
const showEditModal = (apiKey: IngestionKeyProps): void => {
const showEditModal = (apiKey: GatewaytypesIngestionKeyDTO): void => {
setHasCreateLimitForIngestionKeyError(false);
setCreateLimitForIngestionKeyError(null);
setHasUpdateLimitForIngestionKeyError(false);
setUpdateLimitForIngestionKeyError(null);
setActiveAPIKey(apiKey);
handleFormReset();
setUpdatedTags(apiKey.tags || []);
@@ -248,6 +269,10 @@ function MultiIngestionSettings(): JSX.Element {
};
const showAddModal = (): void => {
setHasCreateLimitForIngestionKeyError(false);
setCreateLimitForIngestionKeyError(null);
setHasUpdateLimitForIngestionKeyError(false);
setUpdateLimitForIngestionKeyError(null);
setUpdatedTags([]);
setActiveAPIKey(null);
setIsAddModalOpen(true);
@@ -258,25 +283,60 @@ function MultiIngestionSettings(): JSX.Element {
setActiveSignal(null);
};
// Use search API when searchText is present, otherwise use normal get API
const isSearching = searchText.length > 0;
const {
data: IngestionKeys,
isLoading,
isRefetching,
refetch: refetchAPIKeys,
error,
isError,
} = useGetAllIngestionsKeys({
search: searchText,
...paginationParams,
});
data: ingestionKeysData,
isLoading: isLoadingGet,
isRefetching: isRefetchingGet,
refetch: refetchGetAPIKeys,
error: getError,
isError: isGetError,
} = useGetIngestionKeys(
{
...paginationParams,
},
{
query: {
enabled: !isSearching,
},
},
);
const {
data: searchIngestionKeysData,
isLoading: isLoadingSearch,
isRefetching: isRefetchingSearch,
refetch: refetchSearchAPIKeys,
error: searchError,
isError: isSearchError,
} = useSearchIngestionKeys(
{
page: FIRST_PAGE,
per_page: SEARCH_PAGE_SIZE,
name: searchText,
},
{
query: {
enabled: isSearching,
},
},
);
// Use the appropriate data based on which API is active
const IngestionKeys = isSearching
? searchIngestionKeysData
: ingestionKeysData;
const isLoading = isSearching ? isLoadingSearch : isLoadingGet;
const isRefetching = isSearching ? isRefetchingSearch : isRefetchingGet;
const refetchAPIKeys = isSearching ? refetchSearchAPIKeys : refetchGetAPIKeys;
const error = isSearching ? searchError : getError;
const isError = isSearching ? isSearchError : isGetError;
useEffect(() => {
setActiveAPIKey(IngestionKeys?.data.data[0]);
}, [IngestionKeys]);
useEffect(() => {
setDataSource(IngestionKeys?.data.data || []);
setTotalIngestionKeys(IngestionKeys?.data?._pagination?.total || 0);
setDataSource(IngestionKeys?.data.data?.keys || []);
setTotalIngestionKeys(IngestionKeys?.data?.data?._pagination?.total || 0);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [IngestionKeys?.data?.data]);
@@ -297,6 +357,7 @@ function MultiIngestionSettings(): JSX.Element {
const clearSearch = (): void => {
setSearchValue('');
setSearchText('');
};
const {
@@ -309,101 +370,54 @@ function MultiIngestionSettings(): JSX.Element {
const {
mutate: createIngestionKey,
isLoading: isLoadingCreateAPIKey,
} = useMutation(createIngestionKeyApi, {
onSuccess: (data) => {
setActiveAPIKey(data.payload);
setUpdatedTags([]);
hideAddViewModal();
refetchAPIKeys();
},
onError: (error) => {
showErrorNotification(notifications, error as AxiosError);
},
});
} = useCreateIngestionKey<AxiosError<RenderErrorResponseDTO>>();
const { mutate: updateAPIKey, isLoading: isLoadingUpdateAPIKey } = useMutation(
updateIngestionKey,
{
onSuccess: () => {
refetchAPIKeys();
setIsEditModalOpen(false);
},
onError: (error) => {
showErrorNotification(notifications, error as AxiosError);
},
},
);
const {
mutate: updateAPIKey,
isLoading: isLoadingUpdateAPIKey,
} = useUpdateIngestionKey<AxiosError<RenderErrorResponseDTO>>();
const { mutate: deleteAPIKey, isLoading: isDeleteingAPIKey } = useMutation(
deleteIngestionKey,
{
onSuccess: () => {
refetchAPIKeys();
setIsDeleteModalOpen(false);
},
onError: (error) => {
showErrorNotification(notifications, error as AxiosError);
},
},
);
const {
mutate: deleteAPIKey,
isLoading: isDeleteingAPIKey,
} = useDeleteIngestionKey<AxiosError<RenderErrorResponseDTO>>();
const {
mutate: createLimitForIngestionKey,
isLoading: isLoadingLimitForKey,
} = useMutation(createLimitForIngestionKeyApi, {
onSuccess: () => {
setActiveSignal(null);
setActiveAPIKey(null);
setIsEditAddLimitOpen(false);
setUpdatedTags([]);
hideAddViewModal();
refetchAPIKeys();
setHasCreateLimitForIngestionKeyError(false);
},
onError: (error: ErrorResponse) => {
setHasCreateLimitForIngestionKeyError(true);
setCreateLimitForIngestionKeyError(error);
},
});
} = useCreateIngestionKeyLimit<AxiosError<RenderErrorResponseDTO>>();
const {
mutate: updateLimitForIngestionKey,
isLoading: isLoadingUpdatedLimitForKey,
} = useMutation(updateLimitForIngestionKeyApi, {
onSuccess: () => {
setActiveSignal(null);
setActiveAPIKey(null);
setIsEditAddLimitOpen(false);
setUpdatedTags([]);
hideAddViewModal();
refetchAPIKeys();
setHasUpdateLimitForIngestionKeyError(false);
},
onError: (error: ErrorResponse) => {
setHasUpdateLimitForIngestionKeyError(true);
setUpdateLimitForIngestionKeyError(error);
},
});
} = useUpdateIngestionKeyLimit<AxiosError<RenderErrorResponseDTO>>();
const { mutate: deleteLimitForKey, isLoading: isDeletingLimit } = useMutation(
deleteLimitsForIngestionKey,
{
onSuccess: () => {
setIsDeleteModalOpen(false);
setIsDeleteLimitModalOpen(false);
refetchAPIKeys();
},
onError: (error) => {
showErrorNotification(notifications, error as AxiosError);
},
},
);
const {
mutate: deleteLimitForKey,
isLoading: isDeletingLimit,
} = useDeleteIngestionKeyLimit<AxiosError<RenderErrorResponseDTO>>();
const onDeleteHandler = (): void => {
clearSearch();
if (activeAPIKey) {
deleteAPIKey(activeAPIKey.id);
if (activeAPIKey && activeAPIKey.id) {
deleteAPIKey(
{
pathParams: { keyId: activeAPIKey.id },
},
{
onSuccess: () => {
notifications.success({
message: 'Ingestion key deleted successfully',
});
refetchAPIKeys();
setIsDeleteModalOpen(false);
},
onError: (error) => {
showErrorNotification(notifications, error as AxiosError);
},
},
);
}
};
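The handler above illustrates the calling convention this refactor adopts: the generated gateway hooks take no hook-level callbacks, so the variables (pathParams, data) and the onSuccess/onError handlers are supplied on each mutate call. A condensed sketch of that pattern, reusing names from this file (illustrative only):

// Sketch of the generated-hook mutation call shape used throughout this file.
const { mutate: deleteAPIKey } = useDeleteIngestionKey<AxiosError<RenderErrorResponseDTO>>();

deleteAPIKey(
	{ pathParams: { keyId: 'k1' } }, // variables: path params and/or request body
	{
		onSuccess: () => refetchAPIKeys(), // per-call callbacks replace the old useMutation options
		onError: (error) => showErrorNotification(notifications, error as AxiosError),
	},
);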
@@ -411,15 +425,31 @@ function MultiIngestionSettings(): JSX.Element {
editForm
.validateFields()
.then((values) => {
if (activeAPIKey) {
updateAPIKey({
id: activeAPIKey.id,
data: {
name: values.name,
tags: updatedTags,
expires_at: dayjs(values.expires_at).endOf('day').toISOString(),
if (activeAPIKey && activeAPIKey.id) {
updateAPIKey(
{
pathParams: { keyId: activeAPIKey.id },
data: {
name: values.name,
tags: updatedTags,
expires_at: new Date(
dayjs(values.expires_at).endOf('day').toISOString(),
),
},
},
});
{
onSuccess: () => {
notifications.success({
message: 'Ingestion key updated successfully',
});
refetchAPIKeys();
setIsEditModalOpen(false);
},
onError: (error) => {
showErrorNotification(notifications, error as AxiosError);
},
},
);
}
})
.catch((errorInfo) => {
@@ -435,10 +465,30 @@ function MultiIngestionSettings(): JSX.Element {
const requestPayload = {
name: values.name,
tags: updatedTags,
expires_at: dayjs(values.expires_at).endOf('day').toISOString(),
expires_at: new Date(dayjs(values.expires_at).endOf('day').toISOString()),
};
createIngestionKey(requestPayload);
createIngestionKey(
{
data: requestPayload,
},
{
onSuccess: (_data) => {
notifications.success({
message: 'Ingestion key created successfully',
});
// The new API returns GatewaytypesGettableCreatedIngestionKeyDTO with only id and value
// We rely on refetchAPIKeys to get the full key object
setActiveAPIKey(null);
setUpdatedTags([]);
hideAddViewModal();
refetchAPIKeys();
},
onError: (error) => {
showErrorNotification(notifications, error as AxiosError);
},
},
);
}
})
.catch((errorInfo) => {
@@ -465,7 +515,7 @@ function MultiIngestionSettings(): JSX.Element {
formatTimezoneAdjustedTimestamp(date, DATE_TIME_FORMATS.UTC_MONTH_COMPACT);
const showDeleteLimitModal = (
APIKey: IngestionKeyProps,
APIKey: GatewaytypesIngestionKeyDTO,
limit: LimitProps,
): void => {
setActiveAPIKey(APIKey);
@@ -489,7 +539,7 @@ function MultiIngestionSettings(): JSX.Element {
/* eslint-disable sonarjs/cognitive-complexity */
const handleAddLimit = (
APIKey: IngestionKeyProps,
APIKey: GatewaytypesIngestionKeyDTO,
signalName: string,
): void => {
const {
@@ -502,7 +552,7 @@ function MultiIngestionSettings(): JSX.Element {
} = addEditLimitForm.getFieldsValue();
const payload: AddLimitProps = {
keyID: APIKey.id,
keyID: APIKey.id || '',
signal: signalName,
config: {},
};
@@ -576,11 +626,39 @@ function MultiIngestionSettings(): JSX.Element {
return;
}
createLimitForIngestionKey(payload);
createLimitForIngestionKey(
{
pathParams: { keyId: payload.keyID },
data: {
signal: payload.signal,
config: payload.config,
},
},
{
onSuccess: () => {
notifications.success({
message: 'Limit created successfully',
});
setActiveSignal(null);
setActiveAPIKey(null);
setIsEditAddLimitOpen(false);
setUpdatedTags([]);
hideAddViewModal();
refetchAPIKeys();
setHasCreateLimitForIngestionKeyError(false);
},
onError: (error: AxiosError<RenderErrorResponseDTO>) => {
setHasCreateLimitForIngestionKeyError(true);
setCreateLimitForIngestionKeyError(
error.response?.data?.error?.message || 'Failed to create limit',
);
},
},
);
};
const handleUpdateLimit = (
APIKey: IngestionKeyProps,
APIKey: GatewaytypesIngestionKeyDTO,
signal: LimitProps,
): void => {
const {
@@ -644,7 +722,34 @@ function MultiIngestionSettings(): JSX.Element {
}
}
updateLimitForIngestionKey(payload);
updateLimitForIngestionKey(
{
pathParams: { limitId: payload.limitID },
data: {
config: payload.config,
},
},
{
onSuccess: () => {
notifications.success({
message: 'Limit updated successfully',
});
setActiveSignal(null);
setActiveAPIKey(null);
setIsEditAddLimitOpen(false);
setUpdatedTags([]);
hideAddViewModal();
refetchAPIKeys();
setHasUpdateLimitForIngestionKeyError(false);
},
onError: (error: AxiosError<RenderErrorResponseDTO>) => {
setHasUpdateLimitForIngestionKeyError(true);
setUpdateLimitForIngestionKeyError(
error.response?.data?.error?.message || 'Failed to update limit',
);
},
},
);
};
/* eslint-enable sonarjs/cognitive-complexity */
@@ -656,7 +761,7 @@ function MultiIngestionSettings(): JSX.Element {
};
const enableEditLimitMode = (
APIKey: IngestionKeyProps,
APIKey: GatewaytypesIngestionKeyDTO,
signal: LimitProps,
): void => {
const dayCount = signal?.config?.day?.count;
@@ -665,6 +770,11 @@ function MultiIngestionSettings(): JSX.Element {
const dayCountConverted = countToUnit(dayCount || 0);
const secondCountConverted = countToUnit(secondCount || 0);
setHasCreateLimitForIngestionKeyError(false);
setCreateLimitForIngestionKeyError(null);
setHasUpdateLimitForIngestionKeyError(false);
setUpdateLimitForIngestionKeyError(null);
setActiveAPIKey(APIKey);
setActiveSignal({
...signal,
@@ -703,14 +813,31 @@ function MultiIngestionSettings(): JSX.Element {
const onDeleteLimitHandler = (): void => {
if (activeSignal && activeSignal.id) {
deleteLimitForKey(activeSignal.id);
deleteLimitForKey(
{
pathParams: { limitId: activeSignal.id },
},
{
onSuccess: () => {
notifications.success({
message: 'Limit deleted successfully',
});
setIsDeleteModalOpen(false);
setIsDeleteLimitModalOpen(false);
refetchAPIKeys();
},
onError: (error) => {
showErrorNotification(notifications, error as AxiosError);
},
},
);
}
};
const { formatTimezoneAdjustedTimestamp } = useTimezone();
const handleCreateAlert = (
APIKey: IngestionKeyProps,
APIKey: GatewaytypesIngestionKeyDTO,
signal: LimitProps,
): void => {
let metricName = '';
@@ -771,31 +898,61 @@ function MultiIngestionSettings(): JSX.Element {
history.push(URL);
};
const columns: AntDTableProps<IngestionKeyProps>['columns'] = [
const columns: AntDTableProps<GatewaytypesIngestionKeyDTO>['columns'] = [
{
title: 'Ingestion Key',
key: 'ingestion-key',
// eslint-disable-next-line sonarjs/cognitive-complexity
render: (APIKey: IngestionKeyProps): JSX.Element => {
const createdOn = getFormattedTime(
APIKey.created_at,
formatTimezoneAdjustedTimestamp,
);
render: (APIKey: GatewaytypesIngestionKeyDTO): JSX.Element => {
const createdOn = APIKey?.created_at
? getFormattedTime(
dayjs(APIKey.created_at).toISOString(),
formatTimezoneAdjustedTimestamp,
)
: '';
const expiresOn =
!APIKey?.expires_at || APIKey?.expires_at === '0001-01-01T00:00:00Z'
!APIKey?.expires_at ||
dayjs(APIKey?.expires_at).toISOString() === '0001-01-01T00:00:00.000Z'
? 'No Expiry'
: getFormattedTime(APIKey?.expires_at, formatTimezoneAdjustedTimestamp);
: getFormattedTime(
dayjs(APIKey?.expires_at).toISOString(),
formatTimezoneAdjustedTimestamp,
);
const updatedOn = getFormattedTime(
APIKey?.updated_at,
formatTimezoneAdjustedTimestamp,
);
const updatedOn = APIKey?.updated_at
? getFormattedTime(
dayjs(APIKey.updated_at).toISOString(),
formatTimezoneAdjustedTimestamp,
)
: '';
const onCopyKey = (e: React.MouseEvent): void => {
e.stopPropagation();
e.preventDefault();
if (APIKey?.value) {
handleCopyKey(APIKey.value);
}
};
const onEditKey = (e: React.MouseEvent): void => {
e.stopPropagation();
e.preventDefault();
showEditModal(APIKey);
};
const onDeleteKey = (e: React.MouseEvent): void => {
e.stopPropagation();
e.preventDefault();
showDeleteModal(APIKey);
};
// Convert array of limits to a dictionary for quick access
const limitsDict: Record<string, LimitProps> = {};
APIKey.limits?.forEach((limitItem: LimitProps) => {
limitsDict[limitItem.signal] = limitItem;
APIKey.limits?.forEach((limitItem) => {
if (limitItem.signal && limitItem.id) {
limitsDict[limitItem.signal] = limitItem as LimitProps;
}
});
const hasLimits = (signalName: string): boolean => !!limitsDict[signalName];
@@ -812,39 +969,25 @@ function MultiIngestionSettings(): JSX.Element {
<div className="ingestion-key-value">
<Typography.Text>
{APIKey?.value.substring(0, 2)}********
{APIKey?.value.substring(APIKey.value.length - 2).trim()}
{APIKey?.value?.substring(0, 2)}********
{APIKey?.value
?.substring(APIKey?.value?.length ? APIKey.value.length - 2 : 0)
.trim()}
</Typography.Text>
<Copy
className="copy-key-btn"
size={12}
onClick={(e): void => {
e.stopPropagation();
e.preventDefault();
handleCopyKey(APIKey.value);
}}
/>
<Copy className="copy-key-btn" size={12} onClick={onCopyKey} />
</div>
</div>
<div className="action-btn">
<Button
className="periscope-btn ghost"
icon={<PenLine size={14} />}
onClick={(e): void => {
e.stopPropagation();
e.preventDefault();
showEditModal(APIKey);
}}
onClick={onEditKey}
/>
<Button
className="periscope-btn ghost"
icon={<Trash2 color={Color.BG_CHERRY_500} size={14} />}
onClick={(e): void => {
e.stopPropagation();
e.preventDefault();
showDeleteModal(APIKey);
}}
onClick={onDeleteKey}
/>
</div>
</div>
@@ -854,7 +997,7 @@ function MultiIngestionSettings(): JSX.Element {
<Row>
<Col span={6}> ID </Col>
<Col span={12}>
<Typography.Text>{APIKey.id}</Typography.Text>
<Typography.Text>{APIKey?.id}</Typography.Text>
</Col>
</Row>
@@ -906,6 +1049,39 @@ function MultiIngestionSettings(): JSX.Element {
limit?.config?.second?.size !== undefined ||
limit?.config?.second?.count !== undefined;
const onEditSignalLimit = (e: React.MouseEvent): void => {
e.stopPropagation();
e.preventDefault();
enableEditLimitMode(APIKey, limit);
};
const onDeleteSignalLimit = (e: React.MouseEvent): void => {
e.stopPropagation();
e.preventDefault();
showDeleteLimitModal(APIKey, limit);
};
const onAddSignalLimit = (e: React.MouseEvent): void => {
e.stopPropagation();
e.preventDefault();
enableEditLimitMode(APIKey, {
id: signalName,
signal: signalName,
config: {},
});
};
const onSaveSignalLimit = (): void => {
if (!hasLimits(signalName)) {
handleAddLimit(APIKey, signalName);
} else {
handleUpdateLimit(APIKey, limitsDict[signalName]);
}
};
const onCreateSignalAlert = (): void =>
handleCreateAlert(APIKey, limitsDict[signalName]);
return (
<div className="signal" key={signalName}>
<div className="header">
@@ -916,22 +1092,18 @@ function MultiIngestionSettings(): JSX.Element {
<Button
className="periscope-btn ghost"
icon={<PenLine size={14} />}
disabled={!!(activeAPIKey?.id === APIKey.id && activeSignal)}
onClick={(e): void => {
e.stopPropagation();
e.preventDefault();
enableEditLimitMode(APIKey, limit);
}}
disabled={
!!(activeAPIKey?.id === APIKey?.id && activeSignal)
}
onClick={onEditSignalLimit}
/>
<Button
className="periscope-btn ghost"
icon={<Trash2 color={Color.BG_CHERRY_500} size={14} />}
disabled={!!(activeAPIKey?.id === APIKey.id && activeSignal)}
onClick={(e): void => {
e.stopPropagation();
e.preventDefault();
showDeleteLimitModal(APIKey, limit);
}}
disabled={
!!(activeAPIKey?.id === APIKey?.id && activeSignal)
}
onClick={onDeleteSignalLimit}
/>
</>
) : (
@@ -940,16 +1112,8 @@ function MultiIngestionSettings(): JSX.Element {
size="small"
shape="round"
icon={<PlusIcon size={14} />}
disabled={!!(activeAPIKey?.id === APIKey.id && activeSignal)}
onClick={(e): void => {
e.stopPropagation();
e.preventDefault();
enableEditLimitMode(APIKey, {
id: signalName,
signal: signalName,
config: {},
});
}}
disabled={!!(activeAPIKey?.id === APIKey?.id && activeSignal)}
onClick={onAddSignalLimit}
>
Limits
</Button>
@@ -958,7 +1122,7 @@ function MultiIngestionSettings(): JSX.Element {
</div>
<div className="signal-limit-values">
{activeAPIKey?.id === APIKey.id &&
{activeAPIKey?.id === APIKey?.id &&
activeSignal?.signal === signalName &&
isEditAddLimitOpen ? (
<Form
@@ -1154,27 +1318,27 @@ function MultiIngestionSettings(): JSX.Element {
</div>
</div>
{activeAPIKey?.id === APIKey.id &&
{activeAPIKey?.id === APIKey?.id &&
activeSignal.signal === signalName &&
!isLoadingLimitForKey &&
hasCreateLimitForIngestionKeyError &&
createLimitForIngestionKeyError?.error && (
createLimitForIngestionKeyError && (
<div className="error">
{createLimitForIngestionKeyError?.error}
{createLimitForIngestionKeyError}
</div>
)}
{activeAPIKey?.id === APIKey.id &&
{activeAPIKey?.id === APIKey?.id &&
activeSignal.signal === signalName &&
!isLoadingLimitForKey &&
hasUpdateLimitForIngestionKeyError &&
updateLimitForIngestionKeyError?.error && (
updateLimitForIngestionKeyError && (
<div className="error">
{updateLimitForIngestionKeyError?.error}
{updateLimitForIngestionKeyError}
</div>
)}
{activeAPIKey?.id === APIKey.id &&
{activeAPIKey?.id === APIKey?.id &&
activeSignal.signal === signalName &&
isEditAddLimitOpen && (
<div className="signal-limit-save-discard">
@@ -1188,13 +1352,7 @@ function MultiIngestionSettings(): JSX.Element {
loading={
isLoadingLimitForKey || isLoadingUpdatedLimitForKey
}
onClick={(): void => {
if (!hasLimits(signalName)) {
handleAddLimit(APIKey, signalName);
} else {
handleUpdateLimit(APIKey, limitsDict[signalName]);
}
}}
onClick={onSaveSignalLimit}
>
Save
</Button>
@@ -1275,9 +1433,7 @@ function MultiIngestionSettings(): JSX.Element {
className="set-alert-btn periscope-btn ghost"
type="text"
data-testid={`set-alert-btn-${signalName}`}
onClick={(): void =>
handleCreateAlert(APIKey, limitsDict[signalName])
}
onClick={onCreateSignalAlert}
/>
</Tooltip>
)}
@@ -1392,7 +1548,7 @@ function MultiIngestionSettings(): JSX.Element {
const handleTableChange = (pagination: TablePaginationConfig): void => {
setPaginationParams({
page: pagination?.current || 1,
per_page: 10,
per_page: INITIAL_PAGE_SIZE,
});
};
@@ -1490,7 +1646,7 @@ function MultiIngestionSettings(): JSX.Element {
showHeader={false}
onChange={handleTableChange}
pagination={{
pageSize: paginationParams?.per_page,
pageSize: isSearching ? SEARCH_PAGE_SIZE : paginationParams?.per_page,
hideOnSinglePage: true,
showTotal: (total: number, range: number[]): string =>
`${range[0]}-${range[1]} of ${total} Ingestion keys`,

View File

@@ -86,32 +86,34 @@ describe('MultiIngestionSettings Page', () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
// Arrange API response with a metrics daily count limit so the alert button is visible
const response: TestAllIngestionKeyProps = {
const response = {
status: 'success',
data: [
{
name: 'Key One',
expires_at: TEST_EXPIRES_AT,
value: 'secret',
workspace_id: TEST_WORKSPACE_ID,
id: 'k1',
created_at: TEST_CREATED_UPDATED,
updated_at: TEST_CREATED_UPDATED,
tags: [],
limits: [
{
id: 'l1',
signal: 'metrics',
config: { day: { count: 1000 } },
},
],
},
],
_pagination: { page: 1, per_page: 10, pages: 1, total: 1 },
data: {
keys: [
{
name: 'Key One',
expires_at: TEST_EXPIRES_AT,
value: 'secret',
workspace_id: TEST_WORKSPACE_ID,
id: 'k1',
created_at: TEST_CREATED_UPDATED,
updated_at: TEST_CREATED_UPDATED,
tags: [],
limits: [
{
id: 'l1',
signal: 'metrics',
config: { day: { count: 1000 } },
},
],
},
],
_pagination: { page: 1, per_page: 10, pages: 1, total: 1 },
},
};
server.use(
rest.get('*/workspaces/me/keys*', (_req, res, ctx) =>
rest.get('*/api/v2/gateway/ingestion_keys*', (_req, res, ctx) =>
res(ctx.status(200), ctx.json(response)),
),
);

View File

@@ -2,13 +2,16 @@ import { useEffect, useState } from 'react';
import { useCopyToClipboard } from 'react-use';
import { Button, Skeleton, Tooltip, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { useGetIngestionKeys } from 'api/generated/services/gateway';
import {
GatewaytypesIngestionKeyDTO,
RenderErrorResponseDTO,
} from 'api/generated/services/sigNoz.schemas';
import { AxiosError } from 'axios';
import { DOCS_BASE_URL } from 'constants/app';
import { useGetGlobalConfig } from 'hooks/globalConfig/useGetGlobalConfig';
import { useGetAllIngestionsKeys } from 'hooks/IngestionKeys/useGetAllIngestionKeys';
import { useNotifications } from 'hooks/useNotifications';
import { ArrowUpRight, Copy, Info, Key, TriangleAlert } from 'lucide-react';
import { IngestionKeyProps } from 'types/api/ingestionKeys/types';
import './IngestionDetails.styles.scss';
@@ -39,17 +42,17 @@ export default function OnboardingIngestionDetails(): JSX.Element {
const { notifications } = useNotifications();
const [, handleCopyToClipboard] = useCopyToClipboard();
const [firstIngestionKey, setFirstIngestionKey] = useState<IngestionKeyProps>(
{} as IngestionKeyProps,
);
const [
firstIngestionKey,
setFirstIngestionKey,
] = useState<GatewaytypesIngestionKeyDTO>({} as GatewaytypesIngestionKeyDTO);
const {
data: ingestionKeys,
isLoading: isIngestionKeysLoading,
error,
isError,
} = useGetAllIngestionsKeys({
search: '',
} = useGetIngestionKeys({
page: 1,
per_page: 10,
});
@@ -69,8 +72,11 @@ export default function OnboardingIngestionDetails(): JSX.Element {
};
useEffect(() => {
if (ingestionKeys?.data.data && ingestionKeys?.data.data.length > 0) {
setFirstIngestionKey(ingestionKeys?.data.data[0]);
if (
ingestionKeys?.data?.data?.keys &&
ingestionKeys?.data.data.keys.length > 0
) {
setFirstIngestionKey(ingestionKeys?.data.data.keys[0]);
}
}, [ingestionKeys]);
@@ -80,7 +86,8 @@ export default function OnboardingIngestionDetails(): JSX.Element {
<div className="ingestion-endpoint-section-error-container">
<Typography.Text className="ingestion-endpoint-section-error-text error">
<TriangleAlert size={14} />{' '}
{(error as AxiosError)?.message || 'Something went wrong'}
{(error as AxiosError<RenderErrorResponseDTO>)?.response?.data?.error
?.message || 'Something went wrong'}
</Typography.Text>
<div className="ingestion-setup-details-links">
@@ -176,7 +183,7 @@ export default function OnboardingIngestionDetails(): JSX.Element {
</Typography.Text>
<Typography.Text className="ingestion-key-value-copy">
{maskKey(firstIngestionKey?.value)}
{maskKey(firstIngestionKey?.value || '')}
<Copy
size={14}
@@ -186,7 +193,9 @@ export default function OnboardingIngestionDetails(): JSX.Element {
`${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.BASE}: ${ONBOARDING_V3_ANALYTICS_EVENTS_MAP?.INGESTION_KEY_COPIED}`,
{},
);
handleCopyKey(firstIngestionKey?.value);
if (firstIngestionKey?.value) {
handleCopyKey(firstIngestionKey.value);
}
}}
/>
</Typography.Text>

View File

@@ -14,7 +14,7 @@ describe('Get Series Data', () => {
expect(seriesData.length).toBe(5);
expect(seriesData[1].label).toBe('firstLegend');
expect(seriesData[1].show).toBe(true);
expect(seriesData[1].fill).toBe('#FF6F91');
expect(seriesData[1].fill).toBe('#C71585');
expect(seriesData[1].width).toBe(2);
});

pkg/apis/fields/api.go (new file, 127 lines)

@@ -0,0 +1,127 @@
package fields
import (
"bytes"
"io"
"net/http"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrymetadata"
"github.com/SigNoz/signoz/pkg/telemetrymeter"
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrytraces"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type API struct {
telemetryStore telemetrystore.TelemetryStore
telemetryMetadataStore telemetrytypes.MetadataStore
}
// TODO: move this to module and remove metastore init
func NewAPI(
settings factory.ProviderSettings,
telemetryStore telemetrystore.TelemetryStore,
) *API {
telemetryMetadataStore := telemetrymetadata.NewTelemetryMetaStore(
settings,
telemetryStore,
telemetrytraces.DBName,
telemetrytraces.TagAttributesV2TableName,
telemetrytraces.SpanAttributesKeysTblName,
telemetrytraces.SpanIndexV3TableName,
telemetrymetrics.DBName,
telemetrymetrics.AttributesMetadataTableName,
telemetrymeter.DBName,
telemetrymeter.SamplesAgg1dTableName,
telemetrylogs.DBName,
telemetrylogs.LogsV2TableName,
telemetrylogs.TagAttributesV2TableName,
telemetrylogs.LogAttributeKeysTblName,
telemetrylogs.LogResourceKeysTblName,
telemetrymetadata.DBName,
telemetrymetadata.AttributesMetadataLocalTableName,
)
return &API{
telemetryStore: telemetryStore,
telemetryMetadataStore: telemetryMetadataStore,
}
}
func (api *API) GetFieldsKeys(w http.ResponseWriter, r *http.Request) {
type fieldKeysResponse struct {
Keys map[string][]*telemetrytypes.TelemetryFieldKey `json:"keys"`
Complete bool `json:"complete"`
}
bodyBytes, _ := io.ReadAll(r.Body)
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
ctx := r.Context()
fieldKeySelector, err := parseFieldKeyRequest(r)
if err != nil {
render.Error(w, err)
return
}
keys, complete, err := api.telemetryMetadataStore.GetKeys(ctx, fieldKeySelector)
if err != nil {
render.Error(w, err)
return
}
response := fieldKeysResponse{
Keys: keys,
Complete: complete,
}
render.Success(w, http.StatusOK, response)
}
func (api *API) GetFieldsValues(w http.ResponseWriter, r *http.Request) {
type fieldValuesResponse struct {
Values *telemetrytypes.TelemetryFieldValues `json:"values"`
Complete bool `json:"complete"`
}
bodyBytes, _ := io.ReadAll(r.Body)
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
ctx := r.Context()
fieldValueSelector, err := parseFieldValueRequest(r)
if err != nil {
render.Error(w, err)
return
}
allValues, allComplete, err := api.telemetryMetadataStore.GetAllValues(ctx, fieldValueSelector)
if err != nil {
render.Error(w, err)
return
}
relatedValues, relatedComplete, err := api.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector)
if err != nil {
// we don't want to return error if we fail to get related values for some reason
relatedValues = []string{}
}
values := &telemetrytypes.TelemetryFieldValues{
StringValues: allValues.StringValues,
NumberValues: allValues.NumberValues,
RelatedValues: relatedValues,
}
response := fieldValuesResponse{
Values: values,
Complete: allComplete && relatedComplete,
}
render.Success(w, http.StatusOK, response)
}
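For orientation, the two handlers above hand their payloads to render.Success. Sketched below are the assumed payload shapes a client would roughly see for /api/v1/fields/keys and /api/v1/fields/values, before any envelope render.Success may add; inner field names follow the telemetrytypes JSON tags and are deliberately left opaque here:

// Assumed handler payload shapes (illustrative only).
interface FieldKeysPayload {
	keys: Record<string, unknown[]>; // field-key name -> TelemetryFieldKey entries
	complete: boolean;
}

interface FieldValuesPayload {
	values: unknown; // TelemetryFieldValues: string, number and related values
	complete: boolean;
}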

pkg/apis/fields/parse.go (new file, 162 lines)

@@ -0,0 +1,162 @@
package fields
import (
"net/http"
"strconv"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
func parseFieldKeyRequest(r *http.Request) (*telemetrytypes.FieldKeySelector, error) {
var req telemetrytypes.FieldKeySelector
var signal telemetrytypes.Signal
var source telemetrytypes.Source
var err error
signalStr := r.URL.Query().Get("signal")
if signalStr != "" {
signal = telemetrytypes.Signal{String: valuer.NewString(signalStr)}
} else {
signal = telemetrytypes.SignalUnspecified
}
sourceStr := r.URL.Query().Get("source")
if sourceStr != "" {
source = telemetrytypes.Source{String: valuer.NewString(sourceStr)}
} else {
source = telemetrytypes.SourceUnspecified
}
if r.URL.Query().Get("limit") != "" {
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse limit")
}
req.Limit = limit
} else {
req.Limit = 1000
}
var startUnixMilli, endUnixMilli int64
if r.URL.Query().Get("startUnixMilli") != "" {
startUnixMilli, err = strconv.ParseInt(r.URL.Query().Get("startUnixMilli"), 10, 64)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse startUnixMilli")
}
// Round down to the nearest 6 hours (21600000 milliseconds)
startUnixMilli -= startUnixMilli % 21600000
}
if r.URL.Query().Get("endUnixMilli") != "" {
endUnixMilli, err = strconv.ParseInt(r.URL.Query().Get("endUnixMilli"), 10, 64)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse endUnixMilli")
}
}
// Parse fieldContext directly instead of using JSON unmarshalling.
var fieldContext telemetrytypes.FieldContext
fieldContextStr := r.URL.Query().Get("fieldContext")
if fieldContextStr != "" {
fieldContext = telemetrytypes.FieldContext{String: valuer.NewString(fieldContextStr)}
}
// Parse fieldDataType directly instead of using JSON unmarshalling.
var fieldDataType telemetrytypes.FieldDataType
fieldDataTypeStr := r.URL.Query().Get("fieldDataType")
if fieldDataTypeStr != "" {
fieldDataType = telemetrytypes.FieldDataType{String: valuer.NewString(fieldDataTypeStr)}
}
metricName := r.URL.Query().Get("metricName")
var metricContext *telemetrytypes.MetricContext
if metricName != "" {
metricContext = &telemetrytypes.MetricContext{
MetricName: metricName,
}
}
name := r.URL.Query().Get("searchText")
if name != "" && fieldContext == telemetrytypes.FieldContextUnspecified {
parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, signal) {
name = parsedFieldKey.Name
fieldContext = parsedFieldKey.FieldContext
}
}
}
req = telemetrytypes.FieldKeySelector{
StartUnixMilli: startUnixMilli,
EndUnixMilli: endUnixMilli,
Signal: signal,
Source: source,
Name: name,
FieldContext: fieldContext,
FieldDataType: fieldDataType,
Limit: req.Limit,
SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeFuzzy,
MetricContext: metricContext,
}
return &req, nil
}
func parseFieldValueRequest(r *http.Request) (*telemetrytypes.FieldValueSelector, error) {
keySelector, err := parseFieldKeyRequest(r)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse field key request")
}
name := r.URL.Query().Get("name")
if name != "" && keySelector.FieldContext == telemetrytypes.FieldContextUnspecified {
parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, keySelector.Signal) {
name = parsedFieldKey.Name
keySelector.FieldContext = parsedFieldKey.FieldContext
}
}
}
keySelector.Name = name
existingQuery := r.URL.Query().Get("existingQuery")
value := r.URL.Query().Get("searchText")
// Parse limit for fieldValue request, fallback to default 50 if parsing fails.
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
if err != nil {
limit = 50
}
req := telemetrytypes.FieldValueSelector{
FieldKeySelector: keySelector,
ExistingQuery: existingQuery,
Value: value,
Limit: limit,
}
return &req, nil
}
func isContextValidForSignal(ctx telemetrytypes.FieldContext, signal telemetrytypes.Signal) bool {
if ctx == telemetrytypes.FieldContextResource ||
ctx == telemetrytypes.FieldContextAttribute ||
ctx == telemetrytypes.FieldContextScope {
return true
}
switch signal.StringValue() {
case telemetrytypes.SignalLogs.StringValue():
return ctx == telemetrytypes.FieldContextLog || ctx == telemetrytypes.FieldContextBody
case telemetrytypes.SignalTraces.StringValue():
return ctx == telemetrytypes.FieldContextSpan || ctx == telemetrytypes.FieldContextEvent || ctx == telemetrytypes.FieldContextTrace
case telemetrytypes.SignalMetrics.StringValue():
return ctx == telemetrytypes.FieldContextMetric
}
return true
}
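As a usage sketch, the selector parsing above maps onto query parameters of GET /api/v1/fields/keys (registered via RegisterFieldsRoutes); a hypothetical frontend call is shown below, with authentication left to the app's existing session handling:

// Illustrative request only — parameter names come from parseFieldKeyRequest above.
async function fetchFieldKeys(): Promise<unknown> {
	const params = new URLSearchParams({
		signal: 'logs', // optional; treated as unspecified when omitted
		searchText: 'service.name', // fuzzy-matched; field context may be inferred from the key text
		limit: '200', // optional; the server defaults to 1000
		startUnixMilli: String(Date.now() - 6 * 60 * 60 * 1000), // server rounds this down to 6h buckets
		endUnixMilli: String(Date.now()),
	});
	const res = await fetch(`/api/v1/fields/keys?${params.toString()}`, {
		credentials: 'include', // ViewAccess middleware applies; session handling is assumed to be in place
	});
	return res.json();
}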

View File

@@ -1,50 +0,0 @@
package signozapiserver
import (
"net/http"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/gorilla/mux"
)
func (provider *provider) addFieldsRoutes(router *mux.Router) error {
if err := router.Handle("/api/v1/fields/keys", handler.New(provider.authZ.ViewAccess(provider.fieldsHandler.GetFieldsKeys), handler.OpenAPIDef{
ID: "GetFieldsKeys",
Tags: []string{"fields"},
Summary: "Get field keys",
Description: "This endpoint returns field keys",
Request: nil,
RequestQuery: new(telemetrytypes.PostableFieldKeysParams),
RequestContentType: "",
Response: new(telemetrytypes.GettableFieldKeys),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v1/fields/values", handler.New(provider.authZ.ViewAccess(provider.fieldsHandler.GetFieldsValues), handler.OpenAPIDef{
ID: "GetFieldsValues",
Tags: []string{"fields"},
Summary: "Get field values",
Description: "This endpoint returns field values",
Request: nil,
RequestQuery: new(telemetrytypes.PostableFieldValueParams),
RequestContentType: "",
Response: new(telemetrytypes.GettableFieldValues),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
return nil
}

View File

@@ -13,7 +13,6 @@ import (
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/modules/authdomain"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/preference"
@@ -45,7 +44,6 @@ type provider struct {
gatewayHandler gateway.Handler
roleGetter role.Getter
roleHandler role.Handler
fieldsHandler fields.Handler
}
func NewFactory(
@@ -65,31 +63,9 @@ func NewFactory(
gatewayHandler gateway.Handler,
roleGetter role.Getter,
roleHandler role.Handler,
fieldsHandler fields.Handler,
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
return newProvider(
ctx,
providerSettings,
config,
orgGetter,
authz,
orgHandler,
userHandler,
sessionHandler,
authDomainHandler,
preferenceHandler,
globalHandler,
promoteHandler,
flaggerHandler,
dashboardModule,
dashboardHandler,
metricsExplorerHandler,
gatewayHandler,
roleGetter,
roleHandler,
fieldsHandler,
)
return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler, globalHandler, promoteHandler, flaggerHandler, dashboardModule, dashboardHandler, metricsExplorerHandler, gatewayHandler, roleGetter, roleHandler)
})
}
@@ -113,7 +89,6 @@ func newProvider(
gatewayHandler gateway.Handler,
roleGetter role.Getter,
roleHandler role.Handler,
fieldsHandler fields.Handler,
) (apiserver.APIServer, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
router := mux.NewRouter().UseEncodedPath()
@@ -136,7 +111,6 @@ func newProvider(
gatewayHandler: gatewayHandler,
roleGetter: roleGetter,
roleHandler: roleHandler,
fieldsHandler: fieldsHandler,
}
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz, roleGetter)
@@ -201,10 +175,6 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
return err
}
if err := provider.addFieldsRoutes(router); err != nil {
return err
}
return nil
}

View File

@@ -1,11 +0,0 @@
package fields
import "net/http"
type Handler interface {
// Gets the fields keys for the given field key selector
GetFieldsKeys(http.ResponseWriter, *http.Request)
// Gets the fields values for the given field value selector
GetFieldsValues(http.ResponseWriter, *http.Request)
}

View File

@@ -1,79 +0,0 @@
package implfields
import (
"net/http"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/binding"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type handler struct {
telemetryMetadataStore telemetrytypes.MetadataStore
}
func NewHandler(settings factory.ProviderSettings, telemetryMetadataStore telemetrytypes.MetadataStore) fields.Handler {
return &handler{
telemetryMetadataStore: telemetryMetadataStore,
}
}
func (handler *handler) GetFieldsKeys(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
var params telemetrytypes.PostableFieldKeysParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
render.Error(rw, err)
return
}
fieldKeySelector := telemetrytypes.NewFieldKeySelectorFromPostableFieldKeysParams(params)
keys, complete, err := handler.telemetryMetadataStore.GetKeys(ctx, fieldKeySelector)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, &telemetrytypes.GettableFieldKeys{
Keys: keys,
Complete: complete,
})
}
func (handler *handler) GetFieldsValues(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
var params telemetrytypes.PostableFieldValueParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
render.Error(rw, err)
return
}
fieldValueSelector := telemetrytypes.NewFieldValueSelectorFromPostableFieldValueParams(params)
allValues, allComplete, err := handler.telemetryMetadataStore.GetAllValues(ctx, fieldValueSelector)
if err != nil {
render.Error(rw, err)
return
}
relatedValues, relatedComplete, err := handler.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector)
if err != nil {
// we don't want to return error if we fail to get related values for some reason
relatedValues = []string{}
}
values := &telemetrytypes.TelemetryFieldValues{
StringValues: allValues.StringValues,
NumberValues: allValues.NumberValues,
RelatedValues: relatedValues,
}
render.Success(rw, http.StatusOK, &telemetrytypes.GettableFieldValues{
Values: values,
Complete: allComplete && relatedComplete,
})
}

View File

@@ -25,6 +25,7 @@ import (
"time"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/http/render"
@@ -144,6 +145,8 @@ type APIHandler struct {
LicensingAPI licensing.API
FieldsAPI *fields.API
QuerierAPI *querierAPI.API
QueryParserAPI *queryparser.API
@@ -174,6 +177,8 @@ type APIHandlerOpts struct {
LicensingAPI licensing.API
FieldsAPI *fields.API
QuerierAPI *querierAPI.API
QueryParserAPI *queryparser.API
@@ -238,6 +243,7 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
AlertmanagerAPI: opts.AlertmanagerAPI,
LicensingAPI: opts.LicensingAPI,
Signoz: opts.Signoz,
FieldsAPI: opts.FieldsAPI,
QuerierAPI: opts.QuerierAPI,
QueryParserAPI: opts.QueryParserAPI,
}
@@ -393,6 +399,13 @@ func (aH *APIHandler) RegisterQueryRangeV3Routes(router *mux.Router, am *middlew
subRouter.HandleFunc("/logs/livetail", am.ViewAccess(aH.QuerierAPI.QueryRawStream)).Methods(http.MethodGet)
}
func (aH *APIHandler) RegisterFieldsRoutes(router *mux.Router, am *middleware.AuthZ) {
subRouter := router.PathPrefix("/api/v1").Subrouter()
subRouter.HandleFunc("/fields/keys", am.ViewAccess(aH.FieldsAPI.GetFieldsKeys)).Methods(http.MethodGet)
subRouter.HandleFunc("/fields/values", am.ViewAccess(aH.FieldsAPI.GetFieldsValues)).Methods(http.MethodGet)
}
func (aH *APIHandler) RegisterInfraMetricsRoutes(router *mux.Router, am *middleware.AuthZ) {
hostsSubRouter := router.PathPrefix("/api/v1/hosts").Subrouter()
hostsSubRouter.HandleFunc("/attribute_keys", am.ViewAccess(aH.getHostAttributeKeys)).Methods(http.MethodGet)
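As a usage note for the fields routes added above: a minimal client sketch against the two new read-only endpoints. The base URL, the API-key header name, and the example parameter values are assumptions; the query parameters mirror PostableFieldKeysParams and PostableFieldValueParams shown later in this diff.
package main
import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)
func main() {
	base := "http://localhost:8080" // assumed query-service address
	client := &http.Client{}
	// Suggest field keys for logs whose key text matches "service".
	keysURL := base + "/api/v1/fields/keys?" + url.Values{
		"signal":     {"logs"},
		"searchText": {"service"},
		"limit":      {"100"},
	}.Encode()
	// Suggest values for a specific key, narrowed by an existing filter expression.
	valuesURL := base + "/api/v1/fields/values?" + url.Values{
		"signal":        {"logs"},
		"name":          {"service.name"},
		"searchText":    {"front"},
		"existingQuery": {"severity_text = 'ERROR'"},
	}.Encode()
	for _, u := range []string{keysURL, valuesURL} {
		req, _ := http.NewRequest(http.MethodGet, u, nil)
		req.Header.Set("SIGNOZ-API-KEY", "<api-key>") // header name is an assumption
		resp, err := client.Do(req)
		if err != nil {
			panic(err)
		}
		body, _ := io.ReadAll(resp.Body)
		resp.Body.Close()
		// The bodies wrap GettableFieldKeys and GettableFieldValues respectively
		// (see the telemetrytypes structs further down in this diff).
		fmt.Println(resp.Status, string(body))
	}
}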

View File

@@ -17,6 +17,7 @@ import (
"github.com/gorilla/handlers"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
@@ -132,6 +133,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
FluxInterval: config.Querier.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: nooplicensing.NewLicenseAPI(),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
@@ -213,6 +215,7 @@ func (s *Server) createPublicServer(api *APIHandler, web web.Web) (*http.Server,
api.RegisterLogsRoutes(r, am)
api.RegisterIntegrationRoutes(r, am)
api.RegisterCloudIntegrationsRoutes(r, am)
api.RegisterFieldsRoutes(r, am)
api.RegisterQueryRangeV3Routes(r, am)
api.RegisterInfraMetricsRoutes(r, am)
api.RegisterWebSocketPaths(r, am)

View File

@@ -11,8 +11,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/apdex/implapdex"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/fields/implfields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer/implmetricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/quickfilter"
@@ -30,7 +28,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/tracefunnel"
"github.com/SigNoz/signoz/pkg/modules/tracefunnel/impltracefunnel"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type Handlers struct {
@@ -47,19 +44,9 @@ type Handlers struct {
FlaggerHandler flagger.Handler
GatewayHandler gateway.Handler
Role role.Handler
Fields fields.Handler
}
func NewHandlers(
modules Modules,
providerSettings factory.ProviderSettings,
querier querier.Querier,
licensing licensing.Licensing,
global global.Global,
flaggerService flagger.Flagger,
gatewayService gateway.Gateway,
telemetryMetadataStore telemetrytypes.MetadataStore,
) Handlers {
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing, global global.Global, flaggerService flagger.Flagger, gatewayService gateway.Gateway) Handlers {
return Handlers{
SavedView: implsavedview.NewHandler(modules.SavedView),
Apdex: implapdex.NewHandler(modules.Apdex),
@@ -74,6 +61,5 @@ func NewHandlers(
FlaggerHandler: flagger.NewHandler(flaggerService),
GatewayHandler: gateway.NewHandler(gatewayService),
Role: implrole.NewHandler(modules.RoleSetter, modules.RoleGetter),
Fields: implfields.NewHandler(providerSettings, telemetryMetadataStore),
}
}

View File

@@ -46,7 +46,7 @@ func TestNewHandlers(t *testing.T) {
grantModule := implrole.NewGranter(implrole.NewStore(sqlstore), nil)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, roleSetter, roleGetter, grantModule)
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil, nil)
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil)
reflectVal := reflect.ValueOf(handlers)
for i := 0; i < reflectVal.NumField(); i++ {

View File

@@ -2,7 +2,6 @@ package signoz
import (
"context"
"net/http"
"os"
"reflect"
@@ -16,7 +15,6 @@ import (
"github.com/SigNoz/signoz/pkg/instrumentation"
"github.com/SigNoz/signoz/pkg/modules/authdomain"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/preference"
@@ -25,8 +23,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/gorilla/mux"
"github.com/swaggest/jsonschema-go"
"github.com/swaggest/openapi-go"
"github.com/swaggest/openapi-go/openapi3"
@@ -56,16 +52,11 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
struct{ gateway.Handler }{},
struct{ role.Getter }{},
struct{ role.Handler }{},
struct{ fields.Handler }{},
).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
if err != nil {
return nil, err
}
// Register routes that live outside the APIServer modules
// so they are discovered by the OpenAPI walker.
registerQueryRoutes(apiserver.Router())
reflector := openapi3.NewReflector()
reflector.JSONSchemaReflector().DefaultOptions = append(reflector.JSONSchemaReflector().DefaultOptions, jsonschema.InterceptDefName(func(t reflect.Type, defaultDefName string) string {
if defaultDefName == "RenderSuccessResponse" {
@@ -106,25 +97,3 @@ func (openapi *OpenAPI) CreateAndWrite(path string) error {
return os.WriteFile(path, spec, 0o600)
}
func registerQueryRoutes(router *mux.Router) {
router.Handle("/api/v5/query_range", handler.New(
func(http.ResponseWriter, *http.Request) {},
handler.OpenAPIDef{
ID: "QueryRangeV5",
Tags: []string{"query"},
Summary: "Query range",
Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.",
Request: new(qbtypes.QueryRangeRequest),
RequestContentType: "application/json",
Response: new(qbtypes.QueryRangeResponse),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest},
SecuritySchemes: []handler.OpenAPISecurityScheme{
{Name: ctxtypes.AuthTypeAPIKey.StringValue(), Scopes: []string{"VIEWER"}},
{Name: ctxtypes.AuthTypeTokenizer.StringValue(), Scopes: []string{"VIEWER"}},
},
},
)).Methods(http.MethodPost)
}

View File

@@ -249,7 +249,6 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
handlers.GatewayHandler,
modules.RoleGetter,
handlers.Role,
handlers.Fields,
),
)
}

View File

@@ -396,7 +396,7 @@ func New(
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard, roleSetter, roleGetter, granter)
// Initialize all handlers for the modules
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway, telemetryMetadataStore)
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway)
// Initialize the API server
apiserver, err := factory.NewProviderFromNamedMap(

View File

@@ -1,707 +0,0 @@
package querybuildertypesv5
import (
"github.com/swaggest/jsonschema-go"
)
// Enum returns the acceptable values for QueryType.
func (QueryType) Enum() []any {
return []any{
QueryTypeBuilder,
QueryTypeFormula,
// Not yet supported.
// QueryTypeSubQuery,
// QueryTypeJoin,
QueryTypeTraceOperator,
QueryTypeClickHouseSQL,
QueryTypePromQL,
}
}
// Enum returns the acceptable values for RequestType.
func (RequestType) Enum() []any {
return []any{
RequestTypeScalar,
RequestTypeTimeSeries,
RequestTypeRaw,
RequestTypeRawStream,
RequestTypeTrace,
// RequestTypeDistribution,
}
}
// Enum returns the acceptable values for FunctionName.
func (FunctionName) Enum() []any {
return []any{
FunctionNameCutOffMin,
FunctionNameCutOffMax,
FunctionNameClampMin,
FunctionNameClampMax,
FunctionNameAbsolute,
FunctionNameRunningDiff,
FunctionNameLog2,
FunctionNameLog10,
FunctionNameCumulativeSum,
FunctionNameEWMA3,
FunctionNameEWMA5,
FunctionNameEWMA7,
FunctionNameMedian3,
FunctionNameMedian5,
FunctionNameMedian7,
FunctionNameTimeShift,
FunctionNameAnomaly,
FunctionNameFillZero,
}
}
// Enum returns the acceptable values for OrderDirection.
func (OrderDirection) Enum() []any {
return []any{
OrderDirectionAsc,
OrderDirectionDesc,
}
}
// Enum returns the acceptable values for ReduceTo.
func (ReduceTo) Enum() []any {
return []any{
ReduceToSum,
ReduceToCount,
ReduceToAvg,
ReduceToMin,
ReduceToMax,
ReduceToLast,
ReduceToMedian,
}
}
// Enum returns the acceptable values for VariableType.
func (VariableType) Enum() []any {
return []any{
QueryVariableType,
DynamicVariableType,
CustomVariableType,
TextBoxVariableType,
}
}
// Enum returns the acceptable values for JoinType.
func (JoinType) Enum() []any {
return []any{
JoinTypeInner,
JoinTypeLeft,
JoinTypeRight,
JoinTypeFull,
JoinTypeCross,
}
}
// Enum returns the acceptable values for ColumnType.
func (ColumnType) Enum() []any {
return []any{
ColumnTypeGroup,
ColumnTypeAggregation,
}
}
// queryEnvelopeBuilderTrace is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=traces.
type queryEnvelopeBuilderTrace struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderQuery[TraceAggregation] `json:"spec" description:"The trace builder query specification."`
}
// queryEnvelopeBuilderLog is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=logs.
type queryEnvelopeBuilderLog struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderQuery[LogAggregation] `json:"spec" description:"The log builder query specification."`
}
// queryEnvelopeBuilderMetric is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=metrics.
type queryEnvelopeBuilderMetric struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderQuery[MetricAggregation] `json:"spec" description:"The metric builder query specification."`
}
// queryEnvelopeFormula is the OpenAPI schema for a QueryEnvelope with type=builder_formula.
type queryEnvelopeFormula struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderFormula `json:"spec" description:"The formula specification."`
}
// queryEnvelopeJoin is the OpenAPI schema for a QueryEnvelope with type=builder_join.
// type queryEnvelopeJoin struct {
// Type QueryType `json:"type" description:"The type of the query."`
// Spec QueryBuilderJoin `json:"spec" description:"The join specification."`
// }
// queryEnvelopeTraceOperator is the OpenAPI schema for a QueryEnvelope with type=builder_trace_operator.
type queryEnvelopeTraceOperator struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderTraceOperator `json:"spec" description:"The trace operator specification."`
}
// queryEnvelopePromQL is the OpenAPI schema for a QueryEnvelope with type=promql.
type queryEnvelopePromQL struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec PromQuery `json:"spec" description:"The PromQL query specification."`
}
// queryEnvelopeClickHouseSQL is the OpenAPI schema for a QueryEnvelope with type=clickhouse_sql.
type queryEnvelopeClickHouseSQL struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec ClickHouseQuery `json:"spec" description:"The ClickHouse SQL query specification."`
}
var _ jsonschema.OneOfExposer = QueryEnvelope{}
// JSONSchemaOneOf returns the oneOf variants for the QueryEnvelope discriminated union.
// Each variant represents a different query type with its corresponding spec schema.
func (QueryEnvelope) JSONSchemaOneOf() []any {
return []any{
queryEnvelopeBuilderTrace{},
queryEnvelopeBuilderLog{},
queryEnvelopeBuilderMetric{},
queryEnvelopeFormula{},
// queryEnvelopeJoin{},
queryEnvelopeTraceOperator{},
queryEnvelopePromQL{},
queryEnvelopeClickHouseSQL{},
}
}
var _ jsonschema.Exposer = Step{}
// JSONSchema returns a custom schema for Step that accepts either a duration string or a number (seconds).
func (Step) JSONSchema() (jsonschema.Schema, error) {
s := jsonschema.Schema{}
s.WithDescription("Step interval. Accepts a Go duration string (e.g., \"60s\", \"1m\", \"1h\") or a number representing seconds (e.g., 60).")
strSchema := jsonschema.Schema{}
strSchema.WithType(jsonschema.String.Type())
strSchema.WithExamples("60s", "5m", "1h")
strSchema.WithDescription("Duration string (e.g., \"60s\", \"5m\", \"1h\").")
numSchema := jsonschema.Schema{}
numSchema.WithType(jsonschema.Number.Type())
numSchema.WithExamples(60, 300, 3600)
numSchema.WithDescription("Duration in seconds.")
s.OneOf = []jsonschema.SchemaOrBool{
strSchema.ToSchemaOrBool(),
numSchema.ToSchemaOrBool(),
}
return s, nil
}
var _ jsonschema.OneOfExposer = QueryData{}
// JSONSchemaOneOf documents the polymorphic result types in QueryData.Results.
func (QueryData) JSONSchemaOneOf() []any {
return []any{
TimeSeriesData{},
ScalarData{},
RawData{},
}
}
var _ jsonschema.Preparer = &QueryRangeRequest{}
// PrepareJSONSchema adds examples and description to the QueryRangeRequest schema.
func (q *QueryRangeRequest) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Request body for the v5 query range endpoint. Supports builder queries (traces, logs, metrics), formulas, joins, trace operators, PromQL, and ClickHouse SQL queries.")
schema.WithExamples(
// 1. time_series + traces builder: count spans grouped by service, ordered by count
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{
"expression": "count()",
"alias": "span_count",
},
},
"stepInterval": "60s",
"filter": map[string]any{
"expression": "service.name = 'frontend'",
},
"groupBy": []any{
map[string]any{
"name": "service.name",
"fieldContext": "resource",
},
},
"order": []any{
map[string]any{
"key": map[string]any{"name": "span_count"},
"direction": "desc",
},
},
"limit": 10,
},
},
},
},
},
// 2. time_series + logs builder: count logs grouped by service
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{
"expression": "count()",
"alias": "log_count",
},
},
"stepInterval": "60s",
"filter": map[string]any{
"expression": "severity_text = 'ERROR'",
},
"groupBy": []any{
map[string]any{
"name": "service.name",
"fieldContext": "resource",
},
},
"order": []any{
map[string]any{
"key": map[string]any{"name": "log_count"},
"direction": "desc",
},
},
"limit": 10,
},
},
},
},
},
// 3. time_series + metrics builder (Gauge): latest value averaged across series
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{
"metricName": "system.cpu.utilization",
"timeAggregation": "latest",
"spaceAggregation": "avg",
},
},
"stepInterval": "60s",
"groupBy": []any{
map[string]any{
"name": "host.name",
"fieldContext": "resource",
},
},
},
},
},
},
},
// 4. time_series + metrics builder (Sum): rate of cumulative counter
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{
"metricName": "http.server.duration.count",
"timeAggregation": "rate",
"spaceAggregation": "sum",
},
},
"stepInterval": 120,
"groupBy": []any{
map[string]any{
"name": "service.name",
"fieldContext": "resource",
},
},
},
},
},
},
},
// 5. time_series + metrics builder (Histogram): p99 latency
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{
"metricName": "http.server.duration.bucket",
"spaceAggregation": "p99",
},
},
"stepInterval": "60s",
"groupBy": []any{
map[string]any{
"name": "service.name",
"fieldContext": "resource",
},
},
},
},
},
},
},
// 6. raw + logs builder: fetch raw log records
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"filter": map[string]any{
"expression": "severity_text = 'ERROR'",
},
"selectFields": []any{
map[string]any{
"name": "body",
"fieldContext": "log",
},
map[string]any{
"name": "service.name",
"fieldContext": "resource",
},
},
"order": []any{
map[string]any{
"key": map[string]any{"name": "timestamp", "fieldContext": "log"},
"direction": "desc",
},
map[string]any{
"key": map[string]any{"name": "id"},
"direction": "desc",
},
},
"limit": 50,
"offset": 0,
},
},
},
},
},
// 7. raw + traces builder: fetch raw span records
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"filter": map[string]any{
"expression": "service.name = 'frontend' AND has_error = true",
},
"selectFields": []any{
map[string]any{
"name": "name",
"fieldContext": "span",
},
map[string]any{
"name": "duration_nano",
"fieldContext": "span",
},
},
"order": []any{
map[string]any{
"key": map[string]any{"name": "timestamp", "fieldContext": "span"},
"direction": "desc",
},
},
"limit": 100,
},
},
},
},
},
// 8. scalar + traces builder: total span count as a single value
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{
"expression": "count()",
"alias": "span_count",
},
},
"filter": map[string]any{
"expression": "service.name = 'frontend'",
},
},
},
},
},
},
// 9. scalar + logs builder: total error log count as a single value
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{
"expression": "count()",
"alias": "error_count",
},
},
"filter": map[string]any{
"expression": "severity_text = 'ERROR'",
},
},
},
},
},
},
// 10. scalar + metrics builder: single reduced value with reduceTo
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{
"metricName": "http.server.duration.count",
"timeAggregation": "rate",
"spaceAggregation": "sum",
"reduceTo": "sum",
},
},
"stepInterval": "60s",
},
},
},
},
},
// 11. builder formula: error rate from two trace queries
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{
"expression": "countIf(has_error = true)",
},
},
"stepInterval": "60s",
"groupBy": []any{
map[string]any{
"name": "service.name",
"fieldContext": "resource",
},
},
},
},
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "B",
"signal": "traces",
"aggregations": []any{
map[string]any{
"expression": "count()",
},
},
"stepInterval": "60s",
"groupBy": []any{
map[string]any{
"name": "service.name",
"fieldContext": "resource",
},
},
},
},
map[string]any{
"type": "builder_formula",
"spec": map[string]any{
"name": "error_rate",
"expression": "A / B * 100",
},
},
},
},
},
// 12. PromQL query with UTF-8 dot metric name
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "promql",
"spec": map[string]any{
"name": "request_rate",
"query": "sum(rate({\"http.server.duration.count\"}[5m])) by (\"service.name\")",
"step": 60,
},
},
},
},
},
// 13. ClickHouse SQL — time_series
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "span_rate",
"query": "SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, count() AS value FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= $start_datetime AND timestamp <= $end_datetime GROUP BY ts ORDER BY ts",
},
},
},
},
},
// 14. ClickHouse SQL — raw
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "recent_errors",
"query": "SELECT timestamp, body FROM signoz_logs.distributed_logs_v2 WHERE timestamp >= $start_timestamp_nano AND timestamp <= $end_timestamp_nano AND severity_text = 'ERROR' ORDER BY timestamp DESC LIMIT 100",
},
},
},
},
},
// 15. ClickHouse SQL — scalar
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "total_spans",
"query": "SELECT count() AS value FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= $start_datetime AND timestamp <= $end_datetime",
},
},
},
},
},
)
return nil
}
var _ jsonschema.Preparer = &QueryRangeResponse{}
// PrepareJSONSchema adds description to the QueryRangeResponse schema.
func (q *QueryRangeResponse) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Response from the v5 query range endpoint. The data.results array contains typed results depending on the requestType: TimeSeriesData for time_series, ScalarData for scalar, or RawData for raw requests.")
return nil
}
var _ jsonschema.Preparer = &CompositeQuery{}
// PrepareJSONSchema adds description to the CompositeQuery schema.
func (c *CompositeQuery) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Composite query containing one or more query envelopes. Each query envelope specifies its type and corresponding spec.")
return nil
}
var _ jsonschema.Preparer = &ExecStats{}
// PrepareJSONSchema adds description to the ExecStats schema.
func (e *ExecStats) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Execution statistics for the query, including rows scanned, bytes scanned, and duration.")
return nil
}

View File

@@ -266,110 +266,3 @@ type FieldValueSelector struct {
Value string `json:"value"`
Limit int `json:"limit"`
}
type GettableFieldKeys struct {
Keys map[string][]*TelemetryFieldKey `json:"keys"`
Complete bool `json:"complete"`
}
type PostableFieldKeysParams struct {
Signal Signal `query:"signal"`
Source Source `query:"source"`
Limit int `query:"limit"`
StartUnixMilli int64 `query:"startUnixMilli"`
EndUnixMilli int64 `query:"endUnixMilli"`
FieldContext FieldContext `query:"fieldContext"`
FieldDataType FieldDataType `query:"fieldDataType"`
MetricName string `query:"metricName"`
SearchText string `query:"searchText"`
}
type GettableFieldValues struct {
Values *TelemetryFieldValues `json:"values"`
Complete bool `json:"complete"`
}
type PostableFieldValueParams struct {
PostableFieldKeysParams
Name string `query:"name"`
ExistingQuery string `query:"existingQuery"`
}
func NewFieldKeySelectorFromPostableFieldKeysParams(params PostableFieldKeysParams) *FieldKeySelector {
var req FieldKeySelector
if params.StartUnixMilli != 0 {
req.StartUnixMilli = params.StartUnixMilli
// Round down to the nearest 6 hours (21600000 milliseconds)
req.StartUnixMilli -= req.StartUnixMilli % 21600000
}
if params.EndUnixMilli != 0 {
req.EndUnixMilli = params.EndUnixMilli
}
req.Signal = params.Signal
req.Source = params.Source
req.FieldContext = params.FieldContext
req.FieldDataType = params.FieldDataType
req.SelectorMatchType = FieldSelectorMatchTypeFuzzy
if params.Limit != 0 {
req.Limit = params.Limit
} else {
req.Limit = 1000
}
if params.MetricName != "" {
req.MetricContext = &MetricContext{
MetricName: params.MetricName,
}
}
req.Name = params.SearchText
if params.SearchText != "" && params.FieldContext == FieldContextUnspecified {
parsedFieldKey := GetFieldKeyFromKeyText(params.SearchText)
if parsedFieldKey.FieldContext != FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, req.Signal) {
req.Name = parsedFieldKey.Name
req.FieldContext = parsedFieldKey.FieldContext
}
}
}
return &req
}
func NewFieldValueSelectorFromPostableFieldValueParams(params PostableFieldValueParams) *FieldValueSelector {
keySelector := NewFieldKeySelectorFromPostableFieldKeysParams(params.PostableFieldKeysParams)
fieldValueSelector := &FieldValueSelector{
FieldKeySelector: keySelector,
}
fieldValueSelector.Name = params.Name
if params.Name != "" && fieldValueSelector.FieldContext == FieldContextUnspecified {
parsedFieldKey := GetFieldKeyFromKeyText(params.Name)
if parsedFieldKey.FieldContext != FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, fieldValueSelector.Signal) {
fieldValueSelector.Name = parsedFieldKey.Name
fieldValueSelector.FieldContext = parsedFieldKey.FieldContext
}
}
}
fieldValueSelector.ExistingQuery = params.ExistingQuery
fieldValueSelector.Value = params.SearchText
if params.Limit != 0 {
fieldValueSelector.Limit = params.Limit
} else {
fieldValueSelector.Limit = 50
}
return fieldValueSelector
}
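The removed constructors apply a couple of defaults that are easy to miss: the key selector floors startUnixMilli to a 6-hour boundary and defaults its limit to 1000, while the value selector defaults its limit to 50. A small sketch of the rounding arithmetic, reusing a timestamp from the examples earlier in this diff:
package main
import "fmt"
func main() {
	// startUnixMilli is floored to the nearest 6 hours (21600000 ms),
	// as done in NewFieldKeySelectorFromPostableFieldKeysParams above.
	start := int64(1640998800000)     // 2022-01-01 01:00:00 UTC
	floored := start - start%21600000 // 1640995200000, i.e. 2022-01-01 00:00:00 UTC
	fmt.Println(floored)
}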

View File

@@ -154,21 +154,3 @@ func (f FieldContext) TagType() string {
}
return ""
}
func isContextValidForSignal(ctx FieldContext, signal Signal) bool {
if ctx == FieldContextResource ||
ctx == FieldContextAttribute ||
ctx == FieldContextScope {
return true
}
switch signal.StringValue() {
case SignalLogs.StringValue():
return ctx == FieldContextLog || ctx == FieldContextBody
case SignalTraces.StringValue():
return ctx == FieldContextSpan || ctx == FieldContextEvent || ctx == FieldContextTrace
case SignalMetrics.StringValue():
return ctx == FieldContextMetric
}
return true
}

View File

@@ -1,48 +0,0 @@
package telemetrytypes
// Enum returns the acceptable values for Signal.
func (Signal) Enum() []any {
return []any{
SignalTraces,
SignalLogs,
SignalMetrics,
}
}
// Enum returns the acceptable values for FieldContext.
func (FieldContext) Enum() []any {
return []any{
FieldContextMetric,
FieldContextLog,
FieldContextSpan,
// FieldContextTrace,
FieldContextResource,
// FieldContextScope,
FieldContextAttribute,
// FieldContextEvent,
FieldContextBody,
}
}
// Enum returns the acceptable values for Source.
func (Source) Enum() []any {
return []any{
SourceMeter,
}
}
// Enum returns the acceptable values for FieldDataType.
func (FieldDataType) Enum() []any {
return []any{
FieldDataTypeString,
FieldDataTypeBool,
FieldDataTypeFloat64,
FieldDataTypeInt64,
FieldDataTypeNumber,
// FieldDataTypeArrayString,
// FieldDataTypeArrayFloat64,
// FieldDataTypeArrayBool,
// FieldDataTypeArrayInt64,
// FieldDataTypeArrayNumber,
}
}

View File

@@ -107,13 +107,3 @@ func (enum *Email) UnmarshalText(text []byte) error {
func (enum Email) MarshalText() (text []byte, err error) {
return []byte(enum.StringValue()), nil
}
func (enum *Email) UnmarshalParam(param string) error {
email, err := NewEmail(param)
if err != nil {
return err
}
*enum = email
return nil
}

View File

@@ -77,10 +77,3 @@ func (enum *String) UnmarshalText(text []byte) error {
func (enum String) MarshalText() (text []byte, err error) {
return []byte(enum.StringValue()), nil
}
// Implement Gin's BindUnmarshaler interface
// See https://github.com/SigNoz/signoz/pull/10219 description for additional details
func (enum *String) UnmarshalParam(param string) error {
*enum = NewString(param)
return nil
}

View File

@@ -140,13 +140,3 @@ func (enum *UUID) UnmarshalText(text []byte) error {
func (enum UUID) MarshalText() (text []byte, err error) {
return []byte(enum.StringValue()), nil
}
func (enum *UUID) UnmarshalParam(param string) error {
uuid, err := NewUUID(param)
if err != nil {
return err
}
*enum = uuid
return nil
}

View File

@@ -8,7 +8,6 @@ import (
"fmt"
"github.com/SigNoz/signoz/pkg/errors"
ginbinding "github.com/gin-gonic/gin/binding"
)
var (
@@ -43,7 +42,4 @@ type Valuer interface {
// Implement encoding.TextMarshaler to allow the value to be marshalled into a string
encoding.TextMarshaler
// Implement Gin's BindUnmarshaler interface
ginbinding.BindUnmarshaler
}
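With UnmarshalParam removed from Email, String, and UUID and Gin's BindUnmarshaler dropped from the Valuer interface, query-string decoding goes through pkg/http/binding, as seen in the fields handler earlier in this diff. A minimal sketch of that call shape; the params struct here is hypothetical, and the exact set of field types the binder supports is an assumption.
package main
import (
	"fmt"
	"net/url"
	"github.com/SigNoz/signoz/pkg/http/binding"
)
// listParams is a hypothetical struct; real ones such as PostableFieldKeysParams
// use the same `query` struct tags.
type listParams struct {
	Signal     string `query:"signal"`
	Limit      int    `query:"limit"`
	SearchText string `query:"searchText"`
}
func main() {
	values, _ := url.ParseQuery("signal=logs&limit=100&searchText=service")
	var params listParams
	if err := binding.Query.BindQuery(values, &params); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", params)
}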