Compare commits


6 Commits

Author SHA1 Message Date
Abhi kumar
cc10b488d9 Merge branch 'main' into test/uplot-utils-test 2026-02-09 20:00:13 +05:30
Ashwin Bhatkal
72b0f27494 chore: use variable select strategy + (#10245)
* chore: use variable select strategy

* chore: revert custom multi select

* chore: fix tests

* chore: fix tests

* chore: fix flaky test
2026-02-09 14:29:28 +00:00
Abhi kumar
e36b647bc7 test: added test suites for uplotchart component (#10247)
* test: added test suites for uplotchart component

* chore: resolved pr review comments

* chore: resolved pr review comments
2026-02-09 14:15:21 +00:00
Abhi kumar
7eec0edfd0 Merge branch 'main' into test/uplot-utils-test 2026-02-09 19:33:49 +05:30
Nikhil Soni
b491772eaa fix: ensure trace time range is fetched correctly (#10252)
If multiple batches are inserted with the same trace_id, the
trace_summary table can have multiple rows before they are
aggregated by ClickHouse. The query that gets the time range from
trace_summary assumed a single row, which created unpredictable
behaviour, since any random row could be returned (see the
aggregation sketch below the commit list).
2026-02-09 19:31:08 +05:30
Abhi Kumar
6f5a0258f2 test: added tests for uplotv2 utils 2026-02-09 19:28:54 +05:30
67 changed files with 6195 additions and 9832 deletions
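
The trace time range fix above (b491772eaa) comes down to aggregating over the possibly-multiple trace_summary rows for a trace instead of reading whichever single row happens to come back first. A minimal Go sketch of that idea, assuming a trace_summary table with trace_id, start_time and end_time columns and a standard database/sql connection to ClickHouse (table and column names here are illustrative, not taken from the actual SigNoz schema):

package tracestore

import (
	"context"
	"database/sql"
	"time"
)

// traceTimeRange returns the overall time range for a trace. Multiple ingestion
// batches can insert separate trace_summary rows for the same trace_id before
// ClickHouse merges them, so the query aggregates with min()/max() rather than
// relying on a single (effectively random) row.
func traceTimeRange(ctx context.Context, db *sql.DB, traceID string) (start, end time.Time, err error) {
	row := db.QueryRowContext(ctx,
		`SELECT min(start_time), max(end_time)
		   FROM trace_summary
		  WHERE trace_id = ?`, traceID)
	err = row.Scan(&start, &end)
	return start, end, err
}

The same effect could be had by forcing a merge at read time (e.g. SELECT ... FINAL on a merging table engine), but aggregating in the query keeps the read cheap and works regardless of when ClickHouse merges the parts.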

View File

@@ -1,7 +1,6 @@
package api
import (
"log/slog"
"net/http"
"net/http/httputil"
"time"
@@ -14,6 +13,7 @@ import (
"github.com/SigNoz/signoz/pkg/http/middleware"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
@@ -21,7 +21,6 @@ import (
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/version"
"github.com/gorilla/mux"
)
@@ -31,14 +30,13 @@ type APIHandlerOptions struct {
RulesManager *rules.Manager
UsageManager *usage.Manager
IntegrationsController *integrations.Controller
CloudIntegrationsRegistry map[integrationstypes.CloudProviderType]integrationstypes.CloudProvider
CloudIntegrationsController *cloudintegrations.Controller
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
Gateway *httputil.ReverseProxy
GatewayUrl string
// Querier Influx Interval
FluxInterval time.Duration
GlobalConfig global.Config
Logger *slog.Logger // this is present in Signoz.Instrumentation but adding for quick access
}
type APIHandler struct {
@@ -52,7 +50,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
Reader: opts.DataConnector,
RuleManager: opts.RulesManager,
IntegrationsController: opts.IntegrationsController,
CloudIntegrationsRegistry: opts.CloudIntegrationsRegistry,
CloudIntegrationsController: opts.CloudIntegrationsController,
LogsParsingPipelineController: opts.LogsParsingPipelineController,
FluxInterval: opts.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
@@ -120,12 +118,14 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
}
func (ah *APIHandler) RegisterCloudIntegrationsRoutes(router *mux.Router, am *middleware.AuthZ) {
ah.APIHandler.RegisterCloudIntegrationsRoutes(router, am)
router.HandleFunc(
"/api/v1/cloud-integrations/{cloudProvider}/accounts/generate-connection-params",
am.EditAccess(ah.CloudIntegrationsGenerateConnectionParams),
).Methods(http.MethodGet)
}
func (ah *APIHandler) getVersion(w http.ResponseWriter, r *http.Request) {

View File

@@ -6,7 +6,6 @@ import (
"encoding/json"
"fmt"
"io"
"log/slog"
"net/http"
"strings"
"time"
@@ -14,14 +13,20 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/user"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/gorilla/mux"
"go.uber.org/zap"
)
// TODO: move this file with other cloud integration related code
type CloudIntegrationConnectionParamsResponse struct {
IngestionUrl string `json:"ingestion_url,omitempty"`
IngestionKey string `json:"ingestion_key,omitempty"`
SigNozAPIUrl string `json:"signoz_api_url,omitempty"`
SigNozAPIKey string `json:"signoz_api_key,omitempty"`
}
func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseWriter, r *http.Request) {
claims, err := authtypes.ClaimsFromContext(r.Context())
@@ -36,21 +41,23 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
return
}
cloudProviderString := mux.Vars(r)["cloudProvider"]
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
if err != nil {
render.Error(w, err)
cloudProvider := mux.Vars(r)["cloudProvider"]
if cloudProvider != "aws" {
RespondError(w, basemodel.BadRequest(fmt.Errorf(
"cloud provider not supported: %s", cloudProvider,
)), nil)
return
}
apiKey, err := ah.getOrCreateCloudIntegrationPAT(r.Context(), claims.OrgID, cloudProvider)
if err != nil {
render.Error(w, err)
apiKey, apiErr := ah.getOrCreateCloudIntegrationPAT(r.Context(), claims.OrgID, cloudProvider)
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't provision PAT for cloud integration:",
), nil)
return
}
result := integrationstypes.GettableCloudIntegrationConnectionParams{
result := CloudIntegrationConnectionParamsResponse{
SigNozAPIKey: apiKey,
}
@@ -64,17 +71,16 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
// Return the API Key (PAT) even if the rest of the params can not be deduced.
// Params not returned from here will be requested from the user via form inputs.
// This enables gracefully degraded but working experience even for non-cloud deployments.
ah.opts.Logger.InfoContext(
r.Context(),
"ingestion params and signoz api url can not be deduced since no license was found",
)
render.Success(w, http.StatusOK, result)
zap.L().Info("ingestion params and signoz api url can not be deduced since no license was found")
ah.Respond(w, result)
return
}
signozApiUrl, err := ah.getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
if err != nil {
render.Error(w, err)
signozApiUrl, apiErr := ah.getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't deduce ingestion url and signoz api url",
), nil)
return
}
@@ -83,41 +89,48 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
gatewayUrl := ah.opts.GatewayUrl
if len(gatewayUrl) > 0 {
ingestionKeyString, err := ah.getOrCreateCloudProviderIngestionKey(
ingestionKey, apiErr := getOrCreateCloudProviderIngestionKey(
r.Context(), gatewayUrl, license.Key, cloudProvider,
)
if err != nil {
render.Error(w, err)
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't get or create ingestion key",
), nil)
return
}
result.IngestionKey = ingestionKeyString
result.IngestionKey = ingestionKey
} else {
ah.opts.Logger.InfoContext(
r.Context(),
"ingestion key can't be deduced since no gateway url has been configured",
)
zap.L().Info("ingestion key can't be deduced since no gateway url has been configured")
}
render.Success(w, http.StatusOK, result)
ah.Respond(w, result)
}
func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId string, cloudProvider valuer.String) (string, error) {
func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId string, cloudProvider string) (
string, *basemodel.ApiError,
) {
integrationPATName := fmt.Sprintf("%s integration", cloudProvider)
integrationUser, err := ah.getOrCreateCloudIntegrationUser(ctx, orgId, cloudProvider)
if err != nil {
return "", err
integrationUser, apiErr := ah.getOrCreateCloudIntegrationUser(ctx, orgId, cloudProvider)
if apiErr != nil {
return "", apiErr
}
orgIdUUID, err := valuer.NewUUID(orgId)
if err != nil {
return "", err
return "", basemodel.InternalError(fmt.Errorf(
"couldn't parse orgId: %w", err,
))
}
allPats, err := ah.Signoz.Modules.User.ListAPIKeys(ctx, orgIdUUID)
if err != nil {
return "", err
return "", basemodel.InternalError(fmt.Errorf(
"couldn't list PATs: %w", err,
))
}
for _, p := range allPats {
if p.UserID == integrationUser.ID && p.Name == integrationPATName {
@@ -125,10 +138,9 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
}
}
ah.opts.Logger.InfoContext(
ctx,
zap.L().Info(
"no PAT found for cloud integration, creating a new one",
slog.String("cloudProvider", cloudProvider.String()),
zap.String("cloudProvider", cloudProvider),
)
newPAT, err := types.NewStorableAPIKey(
@@ -138,48 +150,68 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
0,
)
if err != nil {
return "", err
return "", basemodel.InternalError(fmt.Errorf(
"couldn't create cloud integration PAT: %w", err,
))
}
err = ah.Signoz.Modules.User.CreateAPIKey(ctx, newPAT)
if err != nil {
return "", err
return "", basemodel.InternalError(fmt.Errorf(
"couldn't create cloud integration PAT: %w", err,
))
}
return newPAT.Token, nil
}
// TODO: move this function out of handler and use proper module structure
func (ah *APIHandler) getOrCreateCloudIntegrationUser(ctx context.Context, orgId string, cloudProvider valuer.String) (*types.User, error) {
cloudIntegrationUserName := fmt.Sprintf("%s-integration", cloudProvider.String())
func (ah *APIHandler) getOrCreateCloudIntegrationUser(
ctx context.Context, orgId string, cloudProvider string,
) (*types.User, *basemodel.ApiError) {
cloudIntegrationUserName := fmt.Sprintf("%s-integration", cloudProvider)
email := valuer.MustNewEmail(fmt.Sprintf("%s@signoz.io", cloudIntegrationUserName))
cloudIntegrationUser, err := types.NewUser(cloudIntegrationUserName, email, types.RoleViewer, valuer.MustNewUUID(orgId))
if err != nil {
return nil, err
return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration user: %w", err))
}
password := types.MustGenerateFactorPassword(cloudIntegrationUser.ID.StringValue())
cloudIntegrationUser, err = ah.Signoz.Modules.User.GetOrCreateUser(ctx, cloudIntegrationUser, user.WithFactorPassword(password))
if err != nil {
return nil, err
return nil, basemodel.InternalError(fmt.Errorf("couldn't look for integration user: %w", err))
}
return cloudIntegrationUser, nil
}
// TODO: move this function out of handler and use proper module structure
func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (string, error) {
respBytes, err := ah.Signoz.Zeus.GetDeployment(ctx, licenseKey)
if err != nil {
return "", errors.WrapInternalf(err, errors.CodeInternal, "couldn't query for deployment info: error")
func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
string, *basemodel.ApiError,
) {
// TODO: remove this struct from here
type deploymentResponse struct {
Name string `json:"name"`
ClusterInfo struct {
Region struct {
DNS string `json:"dns"`
} `json:"region"`
} `json:"cluster"`
}
resp := new(integrationstypes.GettableDeployment)
respBytes, err := ah.Signoz.Zeus.GetDeployment(ctx, licenseKey)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't query for deployment info: error: %w", err,
))
}
resp := new(deploymentResponse)
err = json.Unmarshal(respBytes, resp)
if err != nil {
return "", errors.WrapInternalf(err, errors.CodeInternal, "couldn't unmarshal deployment info response")
return "", basemodel.InternalError(fmt.Errorf(
"couldn't unmarshal deployment info response: error: %w", err,
))
}
regionDns := resp.ClusterInfo.Region.DNS
@@ -187,11 +219,9 @@ func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licens
if len(regionDns) < 1 || len(deploymentName) < 1 {
// Fail early if actual response structure and expectation here ever diverge
return "", errors.WrapInternalf(
err,
errors.CodeInternal,
return "", basemodel.InternalError(fmt.Errorf(
"deployment info response not in expected shape. couldn't determine region dns and deployment name",
)
))
}
signozApiUrl := fmt.Sprintf("https://%s.%s", deploymentName, regionDns)
@@ -199,85 +229,102 @@ func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licens
return signozApiUrl, nil
}
func (ah *APIHandler) getOrCreateCloudProviderIngestionKey(
ctx context.Context, gatewayUrl string, licenseKey string, cloudProvider valuer.String,
) (string, error) {
type ingestionKey struct {
Name string `json:"name"`
Value string `json:"value"`
// other attributes from gateway response not included here since they are not being used.
}
type ingestionKeysSearchResponse struct {
Status string `json:"status"`
Data []ingestionKey `json:"data"`
Error string `json:"error"`
}
type createIngestionKeyResponse struct {
Status string `json:"status"`
Data ingestionKey `json:"data"`
Error string `json:"error"`
}
func getOrCreateCloudProviderIngestionKey(
ctx context.Context, gatewayUrl string, licenseKey string, cloudProvider string,
) (string, *basemodel.ApiError) {
cloudProviderKeyName := fmt.Sprintf("%s-integration", cloudProvider)
// see if the key already exists
searchResult, err := requestGateway[integrationstypes.GettableIngestionKeysSearch](
searchResult, apiErr := requestGateway[ingestionKeysSearchResponse](
ctx,
gatewayUrl,
licenseKey,
fmt.Sprintf("/v1/workspaces/me/keys/search?name=%s", cloudProviderKeyName),
nil,
ah.opts.Logger,
)
if err != nil {
return "", err
if apiErr != nil {
return "", basemodel.WrapApiError(
apiErr, "couldn't search for cloudprovider ingestion key",
)
}
if searchResult.Status != "success" {
return "", errors.NewInternalf(
errors.CodeInternal,
"couldn't search for cloud provider ingestion key: status: %s, error: %s",
return "", basemodel.InternalError(fmt.Errorf(
"couldn't search for cloudprovider ingestion key: status: %s, error: %s",
searchResult.Status, searchResult.Error,
)
))
}
for _, k := range searchResult.Data {
if k.Name != cloudProviderKeyName {
continue
}
if k.Name == cloudProviderKeyName {
if len(k.Value) < 1 {
// Fail early if actual response structure and expectation here ever diverge
return "", basemodel.InternalError(fmt.Errorf(
"ingestion keys search response not as expected",
))
}
if len(k.Value) < 1 {
// Fail early if actual response structure and expectation here ever diverge
return "", errors.NewInternalf(errors.CodeInternal, "ingestion keys search response not as expected")
return k.Value, nil
}
return k.Value, nil
}
ah.opts.Logger.InfoContext(
ctx,
zap.L().Info(
"no existing ingestion key found for cloud integration, creating a new one",
slog.String("cloudProvider", cloudProvider.String()),
zap.String("cloudProvider", cloudProvider),
)
createKeyResult, err := requestGateway[integrationstypes.GettableCreateIngestionKey](
createKeyResult, apiErr := requestGateway[createIngestionKeyResponse](
ctx, gatewayUrl, licenseKey, "/v1/workspaces/me/keys",
map[string]any{
"name": cloudProviderKeyName,
"tags": []string{"integration", cloudProvider.String()},
"tags": []string{"integration", cloudProvider},
},
ah.opts.Logger,
)
if err != nil {
return "", err
if apiErr != nil {
return "", basemodel.WrapApiError(
apiErr, "couldn't create cloudprovider ingestion key",
)
}
if createKeyResult.Status != "success" {
return "", errors.NewInternalf(
errors.CodeInternal,
"couldn't create cloud provider ingestion key: status: %s, error: %s",
return "", basemodel.InternalError(fmt.Errorf(
"couldn't create cloudprovider ingestion key: status: %s, error: %s",
createKeyResult.Status, createKeyResult.Error,
)
))
}
ingestionKeyString := createKeyResult.Data.Value
if len(ingestionKeyString) < 1 {
ingestionKey := createKeyResult.Data.Value
if len(ingestionKey) < 1 {
// Fail early if actual response structure and expectation here ever diverge
return "", errors.NewInternalf(errors.CodeInternal,
return "", basemodel.InternalError(fmt.Errorf(
"ingestion key creation response not as expected",
)
))
}
return ingestionKeyString, nil
return ingestionKey, nil
}
func requestGateway[ResponseType any](
ctx context.Context, gatewayUrl, licenseKey, path string, payload any, logger *slog.Logger,
) (*ResponseType, error) {
ctx context.Context, gatewayUrl string, licenseKey string, path string, payload any,
) (*ResponseType, *basemodel.ApiError) {
baseUrl := strings.TrimSuffix(gatewayUrl, "/")
reqUrl := fmt.Sprintf("%s%s", baseUrl, path)
@@ -288,12 +335,13 @@ func requestGateway[ResponseType any](
"X-Consumer-Groups": "ns:default",
}
return requestAndParseResponse[ResponseType](ctx, reqUrl, headers, payload, logger)
return requestAndParseResponse[ResponseType](ctx, reqUrl, headers, payload)
}
func requestAndParseResponse[ResponseType any](
ctx context.Context, url string, headers map[string]string, payload any, logger *slog.Logger,
) (*ResponseType, error) {
ctx context.Context, url string, headers map[string]string, payload any,
) (*ResponseType, *basemodel.ApiError) {
reqMethod := http.MethodGet
var reqBody io.Reader
if payload != nil {
@@ -301,14 +349,18 @@ func requestAndParseResponse[ResponseType any](
bodyJson, err := json.Marshal(payload)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't marshal payload")
return nil, basemodel.InternalError(fmt.Errorf(
"couldn't serialize request payload to JSON: %w", err,
))
}
reqBody = bytes.NewBuffer(bodyJson)
reqBody = bytes.NewBuffer([]byte(bodyJson))
}
req, err := http.NewRequestWithContext(ctx, reqMethod, url, reqBody)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't create req")
return nil, basemodel.InternalError(fmt.Errorf(
"couldn't prepare request: %w", err,
))
}
for k, v := range headers {
@@ -321,26 +373,23 @@ func requestAndParseResponse[ResponseType any](
response, err := client.Do(req)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't make req")
return nil, basemodel.InternalError(fmt.Errorf("couldn't make request: %w", err))
}
defer func() {
err = response.Body.Close()
if err != nil {
logger.ErrorContext(ctx, "couldn't close response body", "error", err)
}
}()
defer response.Body.Close()
respBody, err := io.ReadAll(response.Body)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't read response body")
return nil, basemodel.InternalError(fmt.Errorf("couldn't read response: %w", err))
}
var resp ResponseType
err = json.Unmarshal(respBody, &resp)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't unmarshal response body")
return nil, basemodel.InternalError(fmt.Errorf(
"couldn't unmarshal gateway response into %T", resp,
))
}
return &resp, nil

View File

@@ -127,7 +127,12 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
)
}
cloudIntegrationsRegistry := cloudintegrations.NewCloudProviderRegistry(signoz.Instrumentation.Logger(), signoz.SQLStore, signoz.Querier)
cloudIntegrationsController, err := cloudintegrations.NewController(signoz.SQLStore)
if err != nil {
return nil, fmt.Errorf(
"couldn't create cloud provider integrations controller: %w", err,
)
}
// ingestion pipelines manager
logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(
@@ -162,13 +167,12 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
RulesManager: rm,
UsageManager: usageManager,
IntegrationsController: integrationsController,
CloudIntegrationsRegistry: cloudIntegrationsRegistry,
CloudIntegrationsController: cloudIntegrationsController,
LogsParsingPipelineController: logParsingPipelineController,
FluxInterval: config.Querier.FluxInterval,
Gateway: gatewayProxy,
GatewayUrl: config.Gateway.URL.String(),
GlobalConfig: config.Global,
Logger: signoz.Instrumentation.Logger(),
}
apiHandler, err := api.NewAPIHandler(apiOpts, signoz)

View File

@@ -140,10 +140,7 @@ const CustomMultiSelect: React.FC<CustomMultiSelectProps> = ({
}, [selectedValues, allAvailableValues, enableAllSelection]);
// Define allOptionShown earlier in the code
const allOptionShown = useMemo(
() => value === ALL_SELECTED_VALUE || value === 'ALL',
[value],
);
const allOptionShown = value === ALL_SELECTED_VALUE;
// Value passed to the underlying Ant Select component
const displayValue = useMemo(

View File

@@ -1,10 +1,11 @@
import { memo, useMemo } from 'react';
import { memo, useEffect, useMemo } from 'react';
import { commaValuesParser } from 'lib/dashboardVariables/customCommaValuesParser';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import { VariableItemProps } from './VariableItem';
import { customVariableSelectStrategy } from './variableSelectStrategy/customVariableSelectStrategy';
type CustomVariableInputProps = Pick<
VariableItemProps,
@@ -29,16 +30,31 @@ function CustomVariableInput({
onChange,
onDropdownVisibleChange,
handleClear,
applyDefaultIfNeeded,
} = useDashboardVariableSelectHelper({
variableData,
optionsData,
onValueUpdate,
strategy: customVariableSelectStrategy,
});
// Apply default on mount — options are available synchronously for custom variables
// eslint-disable-next-line react-hooks/exhaustive-deps
useEffect(applyDefaultIfNeeded, []);
const selectOptions = useMemo(
() =>
optionsData.map((option) => ({
label: option.toString(),
value: option.toString(),
})),
[optionsData],
);
return (
<SelectVariableInput
variableId={variableData.id}
options={optionsData}
options={selectOptions}
value={value}
onChange={onChange}
onDropdownVisibleChange={onDropdownVisibleChange}

View File

@@ -13,7 +13,6 @@ import { AppState } from 'store/reducers';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { GlobalReducer } from 'types/reducer/globalTime';
import DynamicVariableSelection from './DynamicVariableSelection';
import { onUpdateVariableNode } from './util';
import VariableItem from './VariableItem';
@@ -153,14 +152,7 @@ function DashboardVariableSelection(): JSX.Element | null {
{sortedVariablesArray.map((variable) => {
const key = `${variable.name}${variable.id}${variable.order}`;
return variable.type === 'DYNAMIC' ? (
<DynamicVariableSelection
key={key}
existingVariables={dashboardVariables}
variableData={variable}
onValueUpdate={onValueUpdate}
/>
) : (
return (
<VariableItem
key={key}
existingVariables={dashboardVariables}

View File

@@ -0,0 +1,343 @@
import { memo, useCallback, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import { getFieldValues } from 'api/dynamicVariables/getFieldValues';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { DEBOUNCE_DELAY } from 'constants/queryBuilderFilterConfig';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import useDebounce from 'hooks/useDebounce';
import { isEmpty } from 'lodash-es';
import { AppState } from 'store/reducers';
import { GlobalReducer } from 'types/reducer/globalTime';
import { isRetryableError as checkIfRetryableError } from 'utils/errorUtils';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import { getOptionsForDynamicVariable } from './util';
import { VariableItemProps } from './VariableItem';
import { dynamicVariableSelectStrategy } from './variableSelectStrategy/dynamicVariableSelectStrategy';
import './DashboardVariableSelection.styles.scss';
type DynamicVariableInputProps = Pick<
VariableItemProps,
'variableData' | 'onValueUpdate' | 'existingVariables'
>;
// eslint-disable-next-line sonarjs/cognitive-complexity
function DynamicVariableInput({
variableData,
onValueUpdate,
existingVariables,
}: DynamicVariableInputProps): JSX.Element {
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
[],
);
const [errorMessage, setErrorMessage] = useState<null | string>(null);
const [isRetryableError, setIsRetryableError] = useState<boolean>(true);
const [isComplete, setIsComplete] = useState<boolean>(false);
const [filteredOptionsData, setFilteredOptionsData] = useState<
(string | number | boolean)[]
>([]);
const [relatedValues, setRelatedValues] = useState<string[]>([]);
const [originalRelatedValues, setOriginalRelatedValues] = useState<string[]>(
[],
);
// Track dropdown open state for auto-checking new values
const [isDropdownOpen, setIsDropdownOpen] = useState<boolean>(false);
const [apiSearchText, setApiSearchText] = useState<string>('');
const debouncedApiSearchText = useDebounce(apiSearchText, DEBOUNCE_DELAY);
// Build a memoized list of all currently available option strings (normalized + related)
const allAvailableOptionStrings = useMemo(
() => [
...new Set([
...optionsData.map((v) => v.toString()),
...relatedValues.map((v) => v.toString()),
]),
],
[optionsData, relatedValues],
);
const {
value,
tempSelection,
setTempSelection,
handleClear,
enableSelectAll,
defaultValue,
applyDefaultIfNeeded,
onChange,
onDropdownVisibleChange,
} = useDashboardVariableSelectHelper({
variableData,
optionsData,
onValueUpdate,
strategy: dynamicVariableSelectStrategy,
allAvailableOptionStrings,
});
// Create a dependency key from all dynamic variables
const dynamicVariablesKey = useMemo(() => {
if (!existingVariables) {
return 'no_variables';
}
const dynamicVars = Object.values(existingVariables)
.filter((v) => v.type === 'DYNAMIC')
.map(
(v) => `${v.name || 'unnamed'}:${JSON.stringify(v.selectedValue || null)}`,
)
.join('|');
return dynamicVars || 'no_dynamic_variables';
}, [existingVariables]);
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
// existingQuery is built from the other dynamic variables around this one with their current values
// e.g. k8s.namespace.name IN ["zeus", "gene"] AND doc_op_type IN ["test"]
// eslint-disable-next-line sonarjs/cognitive-complexity
const existingQuery = useMemo(() => {
if (!existingVariables || !variableData.dynamicVariablesAttribute) {
return '';
}
const queryParts: string[] = [];
Object.entries(existingVariables).forEach(([, variable]) => {
// Skip the current variable being processed
if (variable.id === variableData.id) {
return;
}
// Only include dynamic variables that have selected values and are not selected as ALL
if (
variable.type === 'DYNAMIC' &&
variable.dynamicVariablesAttribute &&
variable.selectedValue &&
!isEmpty(variable.selectedValue) &&
(variable.showALLOption ? !variable.allSelected : true)
) {
const attribute = variable.dynamicVariablesAttribute;
const values = Array.isArray(variable.selectedValue)
? variable.selectedValue
: [variable.selectedValue];
// Filter out empty values and convert to strings
const validValues = values
.filter((val) => val !== null && val !== undefined && val !== '')
.map((val) => val.toString());
if (validValues.length > 0) {
// Format values for query - wrap strings in quotes, keep numbers as is
const formattedValues = validValues.map((val) => {
// Check if value is a number
const numValue = Number(val);
if (!Number.isNaN(numValue) && Number.isFinite(numValue)) {
return val; // Keep as number
}
// Escape single quotes and wrap in quotes
return `'${val.replace(/'/g, "\\'")}'`;
});
if (formattedValues.length === 1) {
queryParts.push(`${attribute} = ${formattedValues[0]}`);
} else {
queryParts.push(`${attribute} IN [${formattedValues.join(', ')}]`);
}
}
}
});
return queryParts.join(' AND ');
}, [
existingVariables,
variableData.id,
variableData.dynamicVariablesAttribute,
]);
// Wrap the hook's onDropdownVisibleChange to also track isDropdownOpen and handle cleanup
const handleSelectDropdownVisibilityChange = useCallback(
(visible: boolean): void => {
setIsDropdownOpen(visible);
onDropdownVisibleChange(visible);
if (!visible) {
setFilteredOptionsData(optionsData);
setRelatedValues(originalRelatedValues);
setApiSearchText('');
}
},
[onDropdownVisibleChange, optionsData, originalRelatedValues],
);
const { isLoading, refetch } = useQuery(
[
REACT_QUERY_KEY.DASHBOARD_BY_ID,
variableData.name || `variable_${variableData.id}`,
dynamicVariablesKey,
minTime,
maxTime,
debouncedApiSearchText,
variableData.dynamicVariablesSource,
variableData.dynamicVariablesAttribute,
],
{
enabled:
variableData.type === 'DYNAMIC' &&
!!variableData.dynamicVariablesSource &&
!!variableData.dynamicVariablesAttribute,
queryFn: () =>
getFieldValues(
variableData.dynamicVariablesSource?.toLowerCase() === 'all telemetry'
? undefined
: (variableData.dynamicVariablesSource?.toLowerCase() as
| 'traces'
| 'logs'
| 'metrics'),
variableData.dynamicVariablesAttribute,
debouncedApiSearchText,
minTime,
maxTime,
existingQuery,
),
onSuccess: (data) => {
const newNormalizedValues = data.data?.normalizedValues || [];
const newRelatedValues = data.data?.relatedValues || [];
if (!debouncedApiSearchText) {
setOptionsData(newNormalizedValues);
setIsComplete(data.data?.complete || false);
}
setFilteredOptionsData(newNormalizedValues);
setRelatedValues(newRelatedValues);
setOriginalRelatedValues(newRelatedValues);
// Only run auto-check logic when necessary to avoid performance issues
if (variableData.allSelected && isDropdownOpen) {
// Build the latest full list from API (normalized + related)
const latestValues = [
...new Set([
...newNormalizedValues.map((v) => v.toString()),
...newRelatedValues.map((v) => v.toString()),
]),
];
// Update temp selection to exactly reflect latest API values when ALL is active
const currentStrings = Array.isArray(tempSelection)
? tempSelection.map((v) => v.toString())
: tempSelection
? [tempSelection.toString()]
: [];
const areSame =
currentStrings.length === latestValues.length &&
latestValues.every((v) => currentStrings.includes(v));
if (!areSame) {
setTempSelection(latestValues);
}
}
// Apply default if no value is selected (e.g., new variable, first load)
if (!debouncedApiSearchText) {
const allNewOptions = [
...new Set([
...newNormalizedValues.map((v) => v.toString()),
...newRelatedValues.map((v) => v.toString()),
]),
];
applyDefaultIfNeeded(allNewOptions);
}
},
onError: (error: any) => {
if (error) {
let message = SOMETHING_WENT_WRONG;
if (error?.message) {
message = error?.message;
} else {
message =
'Please make sure configuration is valid and you have required setup and permissions';
}
setErrorMessage(message);
// Check if error is retryable (5xx) or not (4xx)
const isRetryable = checkIfRetryableError(error);
setIsRetryableError(isRetryable);
}
},
},
);
const handleRetry = useCallback((): void => {
setErrorMessage(null);
setIsRetryableError(true);
refetch();
}, [refetch]);
const handleSearch = useCallback(
(text: string) => {
if (isComplete) {
if (!text) {
setFilteredOptionsData(optionsData);
setRelatedValues(originalRelatedValues);
return;
}
const lowerText = text.toLowerCase();
setFilteredOptionsData(
optionsData.filter((option) =>
option.toString().toLowerCase().includes(lowerText),
),
);
setRelatedValues(
originalRelatedValues.filter((val) =>
val.toLowerCase().includes(lowerText),
),
);
} else {
setApiSearchText(text);
}
},
[isComplete, optionsData, originalRelatedValues],
);
const selectOptions = useMemo(
() =>
getOptionsForDynamicVariable(filteredOptionsData || [], relatedValues || []),
[filteredOptionsData, relatedValues],
);
return (
<SelectVariableInput
variableId={variableData.id}
options={selectOptions}
value={value}
onChange={onChange}
onDropdownVisibleChange={handleSelectDropdownVisibilityChange}
onClear={handleClear}
enableSelectAll={enableSelectAll}
defaultValue={defaultValue}
isMultiSelect={variableData.multiSelect}
// dynamic variable specific + API related props
loading={isLoading}
errorMessage={errorMessage}
onRetry={handleRetry}
isDynamicVariable
showRetryButton={isRetryableError}
showIncompleteDataMessage={!isComplete && filteredOptionsData.length > 0}
onSearch={handleSearch}
/>
);
}
export default memo(DynamicVariableInput);

View File

@@ -1,602 +0,0 @@
/* eslint-disable sonarjs/cognitive-complexity */
/* eslint-disable no-nested-ternary */
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import { InfoCircleOutlined } from '@ant-design/icons';
import { Tooltip, Typography } from 'antd';
import { getFieldValues } from 'api/dynamicVariables/getFieldValues';
import { CustomMultiSelect, CustomSelect } from 'components/NewSelect';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { DEBOUNCE_DELAY } from 'constants/queryBuilderFilterConfig';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import useDebounce from 'hooks/useDebounce';
import { isEmpty, isUndefined } from 'lodash-es';
import { AppState } from 'store/reducers';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { GlobalReducer } from 'types/reducer/globalTime';
import { isRetryableError as checkIfRetryableError } from 'utils/errorUtils';
import { popupContainer } from 'utils/selectPopupContainer';
import { ALL_SELECT_VALUE } from '../utils';
import { SelectItemStyle } from './styles';
import {
areArraysEqual,
getOptionsForDynamicVariable,
getSelectValue,
uniqueValues,
} from './util';
import './DashboardVariableSelection.styles.scss';
interface DynamicVariableSelectionProps {
variableData: IDashboardVariable;
existingVariables: Record<string, IDashboardVariable>;
onValueUpdate: (
name: string,
id: string,
arg1: IDashboardVariable['selectedValue'],
allSelected: boolean,
haveCustomValuesSelected?: boolean,
) => void;
}
function DynamicVariableSelection({
variableData,
onValueUpdate,
existingVariables,
}: DynamicVariableSelectionProps): JSX.Element {
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
[],
);
const [errorMessage, setErrorMessage] = useState<null | string>(null);
const [isRetryableError, setIsRetryableError] = useState<boolean>(true);
const [isComplete, setIsComplete] = useState<boolean>(false);
const [filteredOptionsData, setFilteredOptionsData] = useState<
(string | number | boolean)[]
>([]);
const [relatedValues, setRelatedValues] = useState<string[]>([]);
const [originalRelatedValues, setOriginalRelatedValues] = useState<string[]>(
[],
);
const [tempSelection, setTempSelection] = useState<
string | string[] | undefined
>(undefined);
// Track dropdown open state for auto-checking new values
const [isDropdownOpen, setIsDropdownOpen] = useState<boolean>(false);
// Create a dependency key from all dynamic variables
const dynamicVariablesKey = useMemo(() => {
if (!existingVariables) {
return 'no_variables';
}
const dynamicVars = Object.values(existingVariables)
.filter((v) => v.type === 'DYNAMIC')
.map(
(v) => `${v.name || 'unnamed'}:${JSON.stringify(v.selectedValue || null)}`,
)
.join('|');
return dynamicVars || 'no_dynamic_variables';
}, [existingVariables]);
const [apiSearchText, setApiSearchText] = useState<string>('');
const debouncedApiSearchText = useDebounce(apiSearchText, DEBOUNCE_DELAY);
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
// existing query is the query made from the other dynamic variables around this one with there current values
// for e.g. k8s.namespace.name IN ["zeus", "gene"] AND doc_op_type IN ["test"]
const existingQuery = useMemo(() => {
if (!existingVariables || !variableData.dynamicVariablesAttribute) {
return '';
}
const queryParts: string[] = [];
Object.entries(existingVariables).forEach(([, variable]) => {
// Skip the current variable being processed
if (variable.id === variableData.id) {
return;
}
// Only include dynamic variables that have selected values and are not selected as ALL
if (
variable.type === 'DYNAMIC' &&
variable.dynamicVariablesAttribute &&
variable.selectedValue &&
!isEmpty(variable.selectedValue) &&
(variable.showALLOption ? !variable.allSelected : true)
) {
const attribute = variable.dynamicVariablesAttribute;
const values = Array.isArray(variable.selectedValue)
? variable.selectedValue
: [variable.selectedValue];
// Filter out empty values and convert to strings
const validValues = values
.filter((value) => value !== null && value !== undefined && value !== '')
.map((value) => value.toString());
if (validValues.length > 0) {
// Format values for query - wrap strings in quotes, keep numbers as is
const formattedValues = validValues.map((value) => {
// Check if value is a number
const numValue = Number(value);
if (!Number.isNaN(numValue) && Number.isFinite(numValue)) {
return value; // Keep as number
}
// Escape single quotes and wrap in quotes
return `'${value.replace(/'/g, "\\'")}'`;
});
if (formattedValues.length === 1) {
queryParts.push(`${attribute} = ${formattedValues[0]}`);
} else {
queryParts.push(`${attribute} IN [${formattedValues.join(', ')}]`);
}
}
}
});
return queryParts.join(' AND ');
}, [
existingVariables,
variableData.id,
variableData.dynamicVariablesAttribute,
]);
const { isLoading, refetch } = useQuery(
[
REACT_QUERY_KEY.DASHBOARD_BY_ID,
variableData.name || `variable_${variableData.id}`,
dynamicVariablesKey,
minTime,
maxTime,
debouncedApiSearchText,
],
{
enabled: variableData.type === 'DYNAMIC',
queryFn: () =>
getFieldValues(
variableData.dynamicVariablesSource?.toLowerCase() === 'all telemetry'
? undefined
: (variableData.dynamicVariablesSource?.toLowerCase() as
| 'traces'
| 'logs'
| 'metrics'),
variableData.dynamicVariablesAttribute,
debouncedApiSearchText,
minTime,
maxTime,
existingQuery,
),
onSuccess: (data) => {
const newNormalizedValues = data.data?.normalizedValues || [];
const newRelatedValues = data.data?.relatedValues || [];
if (!debouncedApiSearchText) {
setOptionsData(newNormalizedValues);
setIsComplete(data.data?.complete || false);
}
setFilteredOptionsData(newNormalizedValues);
setRelatedValues(newRelatedValues);
setOriginalRelatedValues(newRelatedValues);
// Only run auto-check logic when necessary to avoid performance issues
if (variableData.allSelected && isDropdownOpen) {
// Build the latest full list from API (normalized + related)
const latestValues = [
...new Set([
...newNormalizedValues.map((v) => v.toString()),
...newRelatedValues.map((v) => v.toString()),
]),
];
// Update temp selection to exactly reflect latest API values when ALL is active
const currentStrings = Array.isArray(tempSelection)
? tempSelection.map((v) => v.toString())
: tempSelection
? [tempSelection.toString()]
: [];
const areSame =
currentStrings.length === latestValues.length &&
latestValues.every((v) => currentStrings.includes(v));
if (!areSame) {
setTempSelection(latestValues);
}
}
},
onError: (error: any) => {
if (error) {
let message = SOMETHING_WENT_WRONG;
if (error?.message) {
message = error?.message;
} else {
message =
'Please make sure configuration is valid and you have required setup and permissions';
}
setErrorMessage(message);
// Check if error is retryable (5xx) or not (4xx)
const isRetryable = checkIfRetryableError(error);
setIsRetryableError(isRetryable);
}
},
},
);
const handleChange = useCallback(
(inputValue: string | string[]): void => {
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
if (
value === variableData.selectedValue ||
(Array.isArray(value) &&
Array.isArray(variableData.selectedValue) &&
areArraysEqual(value, variableData.selectedValue))
) {
return;
}
if (variableData.name) {
if (
value === ALL_SELECT_VALUE ||
(Array.isArray(value) && value.includes(ALL_SELECT_VALUE))
) {
// For ALL selection in dynamic variables, pass null to avoid storing values
// The parent component will handle this appropriately
onValueUpdate(variableData.name, variableData.id, null, true);
} else {
// Build union of available options shown in dropdown (normalized + related)
const allAvailableOptionStrings = [
...new Set([
...optionsData.map((v) => v.toString()),
...relatedValues.map((v) => v.toString()),
]),
];
const haveCustomValuesSelected =
Array.isArray(value) &&
!value.every((v) => allAvailableOptionStrings.includes(v.toString()));
onValueUpdate(
variableData.name,
variableData.id,
value,
allAvailableOptionStrings.every((v) => value.includes(v.toString())),
haveCustomValuesSelected,
);
}
}
},
[variableData, onValueUpdate, optionsData, relatedValues],
);
useEffect(() => {
if (
variableData.dynamicVariablesSource &&
variableData.dynamicVariablesAttribute
) {
refetch();
}
}, [
refetch,
variableData.dynamicVariablesSource,
variableData.dynamicVariablesAttribute,
debouncedApiSearchText,
]);
// Build a memoized list of all currently available option strings (normalized + related)
const allAvailableOptionStrings = useMemo(
() => [
...new Set([
...optionsData.map((v) => v.toString()),
...relatedValues.map((v) => v.toString()),
]),
],
[optionsData, relatedValues],
);
const handleSearch = useCallback(
(text: string) => {
if (isComplete) {
if (!text) {
setFilteredOptionsData(optionsData);
setRelatedValues(originalRelatedValues);
return;
}
const localFilteredOptionsData: (string | number | boolean)[] = [];
optionsData.forEach((option) => {
if (option.toString().toLowerCase().includes(text.toLowerCase())) {
localFilteredOptionsData.push(option);
}
});
setFilteredOptionsData(localFilteredOptionsData);
setRelatedValues(
originalRelatedValues.filter((value) =>
value.toLowerCase().includes(text.toLowerCase()),
),
);
} else {
setApiSearchText(text);
}
},
[isComplete, optionsData, originalRelatedValues],
);
const { selectedValue } = variableData;
const selectedValueStringified = useMemo(
() => getSelectValue(selectedValue, variableData),
[selectedValue, variableData],
);
const enableSelectAll = variableData.multiSelect && variableData.showALLOption;
const selectValue =
variableData.allSelected && enableSelectAll
? ALL_SELECT_VALUE
: selectedValueStringified;
// Add a handler for tracking temporary selection changes
const handleTempChange = useCallback(
(inputValue: string | string[]): void => {
// Store the selection in temporary state while dropdown is open
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
const sanitizedValue = uniqueValues(value);
setTempSelection(sanitizedValue);
},
[variableData.multiSelect],
);
// Handle dropdown visibility changes
const handleDropdownVisibleChange = (visible: boolean): void => {
// Update dropdown open state for auto-checking
setIsDropdownOpen(visible);
// Initialize temp selection when opening dropdown
if (visible) {
if (isUndefined(tempSelection) && selectValue === ALL_SELECT_VALUE) {
// When ALL is selected, set selection to exactly the latest available values
const latestAll = [...allAvailableOptionStrings];
setTempSelection(latestAll);
} else {
setTempSelection(getSelectValue(variableData.selectedValue, variableData));
}
}
// Apply changes when closing dropdown
else if (!visible && tempSelection !== undefined) {
// Only call handleChange if there's actually a change in the selection
const currentValue = variableData.selectedValue;
// Helper function to check if arrays have the same elements regardless of order
const areArraysEqualIgnoreOrder = (a: any[], b: any[]): boolean => {
if (a.length !== b.length) {
return false;
}
const sortedA = [...a].sort();
const sortedB = [...b].sort();
return areArraysEqual(sortedA, sortedB);
};
// If ALL was selected before and remains ALL after, skip updating
const wasAllSelected = enableSelectAll && variableData.allSelected;
const isAllSelectedAfter =
enableSelectAll &&
Array.isArray(tempSelection) &&
tempSelection.length === allAvailableOptionStrings.length &&
allAvailableOptionStrings.every((v) => tempSelection.includes(v));
if (wasAllSelected && isAllSelectedAfter) {
setTempSelection(undefined);
return;
}
const hasChanged =
tempSelection !== currentValue &&
!(
Array.isArray(tempSelection) &&
Array.isArray(currentValue) &&
areArraysEqualIgnoreOrder(tempSelection, currentValue)
);
if (hasChanged) {
handleChange(tempSelection);
}
setTempSelection(undefined);
}
// Always reset filtered data when dropdown closes, regardless of tempSelection state
if (!visible) {
setFilteredOptionsData(optionsData);
setRelatedValues(originalRelatedValues);
setApiSearchText('');
}
};
useEffect(
() => (): void => {
// Cleanup on unmount
setTempSelection(undefined);
setFilteredOptionsData([]);
setRelatedValues([]);
setApiSearchText('');
},
[],
);
// eslint-disable-next-line sonarjs/cognitive-complexity
const finalSelectedValues = useMemo(() => {
if (variableData.multiSelect) {
let value = tempSelection || selectedValue;
if (isEmpty(value)) {
if (variableData.showALLOption) {
if (variableData.defaultValue) {
value = variableData.defaultValue;
} else if (variableData.allSelected) {
// If ALL is selected but no stored values, derive from available options
// This handles the case where we don't store values in localStorage for ALL
value = allAvailableOptionStrings;
} else {
value = optionsData;
}
} else if (variableData.defaultValue) {
value = variableData.defaultValue;
} else {
value = optionsData?.[0];
}
}
return value;
}
if (isEmpty(selectedValue)) {
if (variableData.defaultValue) {
return variableData.defaultValue;
}
return optionsData[0]?.toString();
}
return selectedValue;
}, [
variableData.multiSelect,
variableData.showALLOption,
variableData.defaultValue,
variableData.allSelected,
selectedValue,
tempSelection,
optionsData,
allAvailableOptionStrings,
]);
useEffect(() => {
if (
(variableData.multiSelect && !(tempSelection || selectValue)) ||
isEmpty(selectValue)
) {
handleChange(finalSelectedValues as string[] | string);
}
}, [
finalSelectedValues,
handleChange,
selectValue,
tempSelection,
variableData.multiSelect,
]);
return (
<div className="variable-item">
<Typography.Text className="variable-name" ellipsis>
${variableData.name}
{variableData.description && (
<Tooltip title={variableData.description}>
<InfoCircleOutlined className="info-icon" />
</Tooltip>
)}
</Typography.Text>
<div className="variable-value">
{variableData.multiSelect ? (
<CustomMultiSelect
key={variableData.id}
options={getOptionsForDynamicVariable(
filteredOptionsData || [],
relatedValues || [],
)}
defaultValue={variableData.defaultValue}
onChange={handleTempChange}
bordered={false}
placeholder="Select value"
placement="bottomLeft"
style={SelectItemStyle}
loading={isLoading}
showSearch
data-testid="variable-select"
className="variable-select"
popupClassName="dropdown-styles"
maxTagCount={2}
getPopupContainer={popupContainer}
value={
(tempSelection || selectValue) === ALL_SELECT_VALUE
? 'ALL'
: tempSelection || selectValue
}
onDropdownVisibleChange={handleDropdownVisibleChange}
errorMessage={errorMessage}
// eslint-disable-next-line react/no-unstable-nested-components
maxTagPlaceholder={(omittedValues): JSX.Element => {
const maxDisplayValues = 10;
const valuesToShow = omittedValues.slice(0, maxDisplayValues);
const hasMore = omittedValues.length > maxDisplayValues;
const tooltipText =
valuesToShow.map(({ value }) => value).join(', ') +
(hasMore ? ` + ${omittedValues.length - maxDisplayValues} more` : '');
return (
<Tooltip title={tooltipText}>
<span>+ {omittedValues.length} </span>
</Tooltip>
);
}}
onClear={(): void => {
handleChange([]);
}}
enableAllSelection={enableSelectAll}
maxTagTextLength={30}
onSearch={handleSearch}
onRetry={(): void => {
setErrorMessage(null);
setIsRetryableError(true);
refetch();
}}
showIncompleteDataMessage={!isComplete && filteredOptionsData.length > 0}
isDynamicVariable
showRetryButton={isRetryableError}
/>
) : (
<CustomSelect
key={variableData.id}
onChange={handleChange}
bordered={false}
placeholder="Select value"
style={SelectItemStyle}
loading={isLoading}
showSearch
data-testid="variable-select"
className="variable-select"
popupClassName="dropdown-styles"
getPopupContainer={popupContainer}
options={getOptionsForDynamicVariable(
filteredOptionsData || [],
relatedValues || [],
)}
value={selectValue}
defaultValue={variableData.defaultValue}
errorMessage={errorMessage}
onSearch={handleSearch}
// eslint-disable-next-line sonarjs/no-identical-functions
onRetry={(): void => {
setErrorMessage(null);
setIsRetryableError(true);
refetch();
}}
showIncompleteDataMessage={!isComplete && filteredOptionsData.length > 0}
isDynamicVariable
showRetryButton={isRetryableError}
/>
)}
</div>
</div>
);
}
export default DynamicVariableSelection;

View File

@@ -1,13 +1,11 @@
import { memo, useCallback, useState } from 'react';
import { memo, useCallback, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
import { isArray, isString } from 'lodash-es';
import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import { AppState } from 'store/reducers';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { VariableResponseProps } from 'types/api/dashboard/variables/query';
import { GlobalReducer } from 'types/reducer/globalTime';
@@ -15,20 +13,18 @@ import { variablePropsToPayloadVariables } from '../utils';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import { areArraysEqual, checkAPIInvocation } from './util';
import { VariableItemProps } from './VariableItem';
import { queryVariableSelectStrategy } from './variableSelectStrategy/queryVariableSelectStrategy';
interface QueryVariableInputProps {
variableData: IDashboardVariable;
existingVariables: Record<string, IDashboardVariable>;
onValueUpdate: (
name: string,
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
) => void;
variablesToGetUpdated: string[];
setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
dependencyData: IDependencyData | null;
}
type QueryVariableInputProps = Pick<
VariableItemProps,
| 'variableData'
| 'existingVariables'
| 'onValueUpdate'
| 'variablesToGetUpdated'
| 'setVariablesToGetUpdated'
| 'dependencyData'
>;
function QueryVariableInput({
variableData,
@@ -56,13 +52,15 @@ function QueryVariableInput({
onChange,
onDropdownVisibleChange,
handleClear,
applyDefaultIfNeeded,
} = useDashboardVariableSelectHelper({
variableData,
optionsData,
onValueUpdate,
strategy: queryVariableSelectStrategy,
});
const validVariableUpdate = (): boolean => {
const validVariableUpdate = useCallback((): boolean => {
if (!variableData.name) {
return false;
}
@@ -70,86 +68,100 @@ function QueryVariableInput({
variablesToGetUpdated.length &&
variablesToGetUpdated[0] === variableData.name,
);
};
}, [variableData.name, variablesToGetUpdated]);
// eslint-disable-next-line sonarjs/cognitive-complexity
const getOptions = (variablesRes: VariableResponseProps | null): void => {
try {
setErrorMessage(null);
const getOptions = useCallback(
// eslint-disable-next-line sonarjs/cognitive-complexity
(variablesRes: VariableResponseProps | null): void => {
try {
setErrorMessage(null);
if (
variablesRes?.variableValues &&
Array.isArray(variablesRes?.variableValues)
) {
const newOptionsData = sortValues(
variablesRes?.variableValues,
variableData.sort,
);
if (
variablesRes?.variableValues &&
Array.isArray(variablesRes?.variableValues)
) {
const newOptionsData = sortValues(
variablesRes?.variableValues,
variableData.sort,
);
const oldOptionsData = sortValues(optionsData, variableData.sort) as never;
const oldOptionsData = sortValues(optionsData, variableData.sort) as never;
if (!areArraysEqual(newOptionsData, oldOptionsData)) {
let valueNotInList = false;
if (!areArraysEqual(newOptionsData, oldOptionsData)) {
let valueNotInList = false;
if (isArray(variableData.selectedValue)) {
variableData.selectedValue.forEach((val) => {
if (!newOptionsData.includes(val)) {
valueNotInList = true;
}
});
} else if (
isString(variableData.selectedValue) &&
!newOptionsData.includes(variableData.selectedValue)
) {
valueNotInList = true;
}
// variablesData.allSelected is added for the case where on change of options we need to update the
// local storage
if (
variableData.name &&
(validVariableUpdate() || valueNotInList || variableData.allSelected)
) {
if (
variableData.allSelected &&
variableData.multiSelect &&
variableData.showALLOption
if (isArray(variableData.selectedValue)) {
variableData.selectedValue.forEach((val) => {
if (!newOptionsData.includes(val)) {
valueNotInList = true;
}
});
} else if (
isString(variableData.selectedValue) &&
!newOptionsData.includes(variableData.selectedValue)
) {
onValueUpdate(variableData.name, variableData.id, newOptionsData, true);
valueNotInList = true;
}
// Update tempSelection to maintain ALL state when dropdown is open
if (tempSelection !== undefined) {
setTempSelection(newOptionsData.map((option) => option.toString()));
}
} else {
const value = variableData.selectedValue;
let allSelected = false;
// variablesData.allSelected is added for the case where on change of options we need to update the
// local storage
if (
variableData.name &&
(validVariableUpdate() || valueNotInList || variableData.allSelected)
) {
if (
variableData.allSelected &&
variableData.multiSelect &&
variableData.showALLOption
) {
onValueUpdate(variableData.name, variableData.id, newOptionsData, true);
if (variableData.multiSelect) {
const { selectedValue } = variableData;
allSelected =
newOptionsData.length > 0 &&
Array.isArray(selectedValue) &&
newOptionsData.every((option) => selectedValue.includes(option));
}
// Update tempSelection to maintain ALL state when dropdown is open
if (tempSelection !== undefined) {
setTempSelection(newOptionsData.map((option) => option.toString()));
}
} else {
const value = variableData.selectedValue;
let allSelected = false;
if (variableData.name && variableData.id) {
onValueUpdate(variableData.name, variableData.id, value, allSelected);
if (variableData.multiSelect) {
const { selectedValue } = variableData;
allSelected =
newOptionsData.length > 0 &&
Array.isArray(selectedValue) &&
newOptionsData.every((option) => selectedValue.includes(option));
}
if (variableData.name && variableData.id) {
onValueUpdate(variableData.name, variableData.id, value, allSelected);
}
}
}
}
setOptionsData(newOptionsData);
} else {
setVariablesToGetUpdated((prev) =>
prev.filter((name) => name !== variableData.name),
);
setOptionsData(newOptionsData);
// Apply default if no value is selected (e.g., new variable, first load)
applyDefaultIfNeeded(newOptionsData);
} else {
setVariablesToGetUpdated((prev) =>
prev.filter((name) => name !== variableData.name),
);
}
}
} catch (e) {
console.error(e);
}
} catch (e) {
console.error(e);
}
};
},
[
variableData,
optionsData,
onValueUpdate,
tempSelection,
setTempSelection,
validVariableUpdate,
setVariablesToGetUpdated,
applyDefaultIfNeeded,
],
);
const { isLoading, refetch } = useQuery(
[
@@ -162,7 +174,6 @@ function QueryVariableInput({
{
enabled:
variableData &&
variableData.type === 'QUERY' &&
checkAPIInvocation(
variablesToGetUpdated,
variableData,
@@ -207,10 +218,19 @@ function QueryVariableInput({
refetch();
}, [refetch]);
const selectOptions = useMemo(
() =>
optionsData.map((option) => ({
label: option.toString(),
value: option.toString(),
})),
[optionsData],
);
return (
<SelectVariableInput
variableId={variableData.id}
options={optionsData}
options={selectOptions}
value={value}
onChange={onChange}
onDropdownVisibleChange={onDropdownVisibleChange}

View File

@@ -3,6 +3,7 @@ import { orange } from '@ant-design/colors';
import { WarningOutlined } from '@ant-design/icons';
import { Popover, Tooltip, Typography } from 'antd';
import { CustomMultiSelect, CustomSelect } from 'components/NewSelect';
import { OptionData } from 'components/NewSelect/types';
import { popupContainer } from 'utils/selectPopupContainer';
import { ALL_SELECT_VALUE } from '../utils';
@@ -12,7 +13,7 @@ const errorIconStyle = { margin: '0 0.5rem' };
interface SelectVariableInputProps {
variableId: string;
options: (string | number | boolean)[];
options: OptionData[];
value: string | string[] | undefined;
enableSelectAll: boolean;
isMultiSelect: boolean;
@@ -23,13 +24,17 @@ interface SelectVariableInputProps {
loading?: boolean;
errorMessage?: string | null;
onRetry?: () => void;
isDynamicVariable?: boolean;
showRetryButton?: boolean;
showIncompleteDataMessage?: boolean;
onSearch?: (searchTerm: string) => void;
}
const MAX_TAG_DISPLAY_VALUES = 10;
function maxTagPlaceholder(
export const renderMaxTagPlaceholder = (
omittedValues: { label?: React.ReactNode; value?: string | number }[],
): JSX.Element {
): JSX.Element => {
const valuesToShow = omittedValues.slice(0, MAX_TAG_DISPLAY_VALUES);
const hasMore = omittedValues.length > MAX_TAG_DISPLAY_VALUES;
const tooltipText =
@@ -41,7 +46,7 @@ function maxTagPlaceholder(
<span>+ {omittedValues.length} </span>
</Tooltip>
);
}
};
function SelectVariableInput({
variableId,
@@ -56,16 +61,11 @@ function SelectVariableInput({
enableSelectAll,
isMultiSelect,
defaultValue,
isDynamicVariable,
showRetryButton,
showIncompleteDataMessage,
onSearch,
}: SelectVariableInputProps): JSX.Element {
const selectOptions = useMemo(
() =>
options.map((option) => ({
label: option.toString(),
value: option.toString(),
})),
[options],
);
const commonProps = useMemo(
() => ({
// main props
@@ -82,23 +82,33 @@ function SelectVariableInput({
showSearch: true,
bordered: false,
// dynamic props
// changing props
'data-testid': 'variable-select',
onChange,
loading,
options: selectOptions,
options,
errorMessage,
onRetry,
// dynamic variable only props
isDynamicVariable,
showRetryButton,
showIncompleteDataMessage,
onSearch,
}),
[
variableId,
defaultValue,
onChange,
loading,
selectOptions,
options,
value,
errorMessage,
onRetry,
isDynamicVariable,
showRetryButton,
showIncompleteDataMessage,
onSearch,
],
);
@@ -110,11 +120,11 @@ function SelectVariableInput({
placement="bottomLeft"
maxTagCount={2}
onDropdownVisibleChange={onDropdownVisibleChange}
maxTagPlaceholder={maxTagPlaceholder}
maxTagPlaceholder={renderMaxTagPlaceholder}
onClear={onClear}
enableAllSelection={enableSelectAll}
maxTagTextLength={30}
allowClear={value !== ALL_SELECT_VALUE && value !== 'ALL'}
allowClear={value !== ALL_SELECT_VALUE}
/>
) : (
<CustomSelect {...commonProps} />


@@ -5,6 +5,7 @@ import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/da
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import CustomVariableInput from './CustomVariableInput';
import DynamicVariableInput from './DynamicVariableInput';
import QueryVariableInput from './QueryVariableInput';
import TextboxVariableInput from './TextboxVariableInput';
@@ -16,8 +17,9 @@ export interface VariableItemProps {
onValueUpdate: (
name: string,
id: string,
arg1: IDashboardVariable['selectedValue'],
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
haveCustomValuesSelected?: boolean,
) => void;
variablesToGetUpdated: string[];
setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
@@ -68,6 +70,13 @@ function VariableItem({
dependencyData={dependencyData}
/>
)}
{variableType === 'DYNAMIC' && (
<DynamicVariableInput
variableData={variableData}
onValueUpdate={onValueUpdate}
existingVariables={existingVariables}
/>
)}
</div>
</div>
);


@@ -5,7 +5,7 @@ import * as ReactRedux from 'react-redux';
import { fireEvent, render, screen } from '@testing-library/react';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import DynamicVariableSelection from '../DynamicVariableSelection';
import DynamicVariableInput from '../DynamicVariableInput';
// Don't mock the components - use real ones
@@ -54,7 +54,7 @@ const mockApiResponse = {
// Mock scrollIntoView since it's not available in JSDOM
window.HTMLElement.prototype.scrollIntoView = jest.fn();
describe('DynamicVariableSelection Component', () => {
describe('DynamicVariableInput Component', () => {
const mockOnValueUpdate = jest.fn();
const mockDynamicVariableData: IDashboardVariable = {
@@ -108,18 +108,13 @@ describe('DynamicVariableSelection Component', () => {
it('renders with single select variable correctly', () => {
render(
<DynamicVariableSelection
<DynamicVariableInput
variableData={mockDynamicVariableData}
existingVariables={mockExistingVariables}
onValueUpdate={mockOnValueUpdate}
/>,
);
// Verify component renders correctly
expect(
screen.getByText(`$${mockDynamicVariableData.name}`),
).toBeInTheDocument();
// Verify the selected value is displayed
const selectedItem = screen.getByRole('combobox');
expect(selectedItem).toBeInTheDocument();
@@ -136,18 +131,13 @@ describe('DynamicVariableSelection Component', () => {
};
render(
<DynamicVariableSelection
<DynamicVariableInput
variableData={multiSelectWithAllSelected}
existingVariables={mockExistingVariables}
onValueUpdate={mockOnValueUpdate}
/>,
);
// Verify variable name is rendered
expect(
screen.getByText(`$${multiSelectWithAllSelected.name}`),
).toBeInTheDocument();
// In ALL selected mode, there should be an "ALL" text element
expect(screen.getByText('ALL')).toBeInTheDocument();
});
@@ -164,18 +154,13 @@ describe('DynamicVariableSelection Component', () => {
});
render(
<DynamicVariableSelection
<DynamicVariableInput
variableData={mockDynamicVariableData}
existingVariables={mockExistingVariables}
onValueUpdate={mockOnValueUpdate}
/>,
);
// Verify component renders in loading state
expect(
screen.getByText(`$${mockDynamicVariableData.name}`),
).toBeInTheDocument();
// Open dropdown to see loading text
const selectElement = screen.getByRole('combobox');
fireEvent.mouseDown(selectElement);
@@ -199,18 +184,13 @@ describe('DynamicVariableSelection Component', () => {
});
render(
<DynamicVariableSelection
<DynamicVariableInput
variableData={mockDynamicVariableData}
existingVariables={mockExistingVariables}
onValueUpdate={mockOnValueUpdate}
/>,
);
// Verify the component renders
expect(
screen.getByText(`$${mockDynamicVariableData.name}`),
).toBeInTheDocument();
// For error states, we should check that error handling is in place
// Without opening the dropdown as the error message might be handled differently
expect(ReactQuery.useQuery).toHaveBeenCalled();
@@ -219,7 +199,7 @@ describe('DynamicVariableSelection Component', () => {
it('makes API call to fetch variable values', () => {
render(
<DynamicVariableSelection
<DynamicVariableInput
variableData={mockDynamicVariableData}
existingVariables={mockExistingVariables}
onValueUpdate={mockOnValueUpdate}
@@ -235,6 +215,8 @@ describe('DynamicVariableSelection Component', () => {
'2023-01-01T00:00:00Z', // minTime from useSelector mock
'2023-01-02T00:00:00Z', // maxTime from useSelector mock
'',
'Traces',
'service.name',
],
expect.objectContaining({
enabled: true, // Type is 'DYNAMIC'
@@ -255,16 +237,13 @@ describe('DynamicVariableSelection Component', () => {
};
render(
<DynamicVariableSelection
<DynamicVariableInput
variableData={customVariable}
existingVariables={{ ...mockExistingVariables, custom1: customVariable }}
onValueUpdate={mockOnValueUpdate}
/>,
);
// Verify the component correctly displays the selected value
expect(screen.getByText(`$${customVariable.name}`)).toBeInTheDocument();
// Find the selection item in the component using data-testid
const selectElement = screen.getByTestId('variable-select');
expect(selectElement).toBeInTheDocument();


@@ -63,10 +63,10 @@ describe('VariableItem Default Value Selection Behavior', () => {
expect(screen.getByTestId(VARIABLE_SELECT_TESTID)).toBeInTheDocument();
});
expect(screen.getByText('option1')).toBeInTheDocument();
expect(await screen.findByText('option1')).toBeInTheDocument();
});
test('should show placeholder when no previous and no default', async () => {
test('should auto-select first option when no previous and no default', async () => {
const variable: IDashboardVariable = {
id: TEST_VARIABLE_ID,
name: TEST_VARIABLE_NAME,
@@ -85,7 +85,8 @@ describe('VariableItem Default Value Selection Behavior', () => {
expect(screen.getByTestId(VARIABLE_SELECT_TESTID)).toBeInTheDocument();
});
expect(screen.getByText('Select value')).toBeInTheDocument();
// With the new variable select strategy, the first option is auto-selected
expect(await screen.findByText('option1')).toBeInTheDocument();
});
});
@@ -110,7 +111,7 @@ describe('VariableItem Default Value Selection Behavior', () => {
expect(screen.getByTestId(VARIABLE_SELECT_TESTID)).toBeInTheDocument();
});
expect(screen.getByText('ALL')).toBeInTheDocument();
expect(await screen.findByText('ALL')).toBeInTheDocument();
});
});
@@ -134,7 +135,7 @@ describe('VariableItem Default Value Selection Behavior', () => {
expect(screen.getByTestId(VARIABLE_SELECT_TESTID)).toBeInTheDocument();
});
expect(screen.getByText('Select value')).toBeInTheDocument();
expect(await screen.findByText('Select value')).toBeInTheDocument();
});
});
});


@@ -1,8 +1,14 @@
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useCallback, useMemo, useState } from 'react';
import { isEmpty } from 'lodash-es';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { ALL_SELECT_VALUE } from '../utils';
import { areArraysEqual, getSelectValue } from './util';
import { VariableSelectStrategy } from './variableSelectStrategy/variableSelectStrategyTypes';
import {
areArraysEqualIgnoreOrder,
uniqueValues,
} from './variableSelectStrategy/variableSelectStrategyUtils';
interface UseDashboardVariableSelectHelperParams {
variableData: IDashboardVariable;
@@ -12,7 +18,11 @@ interface UseDashboardVariableSelectHelperParams {
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
haveCustomValuesSelected?: boolean,
) => void;
strategy: VariableSelectStrategy;
/** Override for all available option strings (default: optionsData.map(String)) */
allAvailableOptionStrings?: string[];
}
interface UseDashboardVariableSelectHelperReturn {
@@ -31,6 +41,11 @@ interface UseDashboardVariableSelectHelperReturn {
onChange: (value: string | string[]) => void;
onDropdownVisibleChange: (visible: boolean) => void;
handleClear: () => void;
// Default value helpers
applyDefaultIfNeeded: (
overrideOptions?: (string | number | boolean)[],
) => void;
}
// eslint-disable-next-line sonarjs/cognitive-complexity
@@ -38,6 +53,8 @@ export function useDashboardVariableSelectHelper({
variableData,
optionsData,
onValueUpdate,
strategy,
allAvailableOptionStrings,
}: UseDashboardVariableSelectHelperParams): UseDashboardVariableSelectHelperReturn {
const { selectedValue } = variableData;
@@ -52,11 +69,37 @@ export function useDashboardVariableSelectHelper({
const enableSelectAll = variableData.multiSelect && variableData.showALLOption;
const effectiveAllAvailableOptionStrings = useMemo(
() => allAvailableOptionStrings ?? optionsData.map((v) => v.toString()),
[allAvailableOptionStrings, optionsData],
);
const selectValue =
variableData.allSelected && enableSelectAll
? 'ALL'
? ALL_SELECT_VALUE
: selectedValueStringified;
const getDefaultValue = useCallback(
(overrideOptions?: (string | number | boolean)[]) => {
const options = overrideOptions || optionsData;
if (variableData.multiSelect) {
if (variableData.showALLOption) {
return variableData.defaultValue || options.map((o) => o.toString());
}
return variableData.defaultValue || options?.[0]?.toString();
}
return variableData.defaultValue || options[0]?.toString();
},
[
variableData.multiSelect,
variableData.showALLOption,
variableData.defaultValue,
optionsData,
],
);
const defaultValue = useMemo(() => getDefaultValue(), [getDefaultValue]);
const handleChange = useCallback(
(inputValue: string | string[]): void => {
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
@@ -69,29 +112,21 @@ export function useDashboardVariableSelectHelper({
) {
return;
}
if (variableData.name) {
// Check if ALL is effectively selected by comparing with available options
const isAllSelected =
Array.isArray(value) &&
value.length > 0 &&
optionsData.every((option) => value.includes(option.toString()));
if (isAllSelected && variableData.showALLOption) {
// For ALL selection, pass optionsData as the value and set allSelected to true
onValueUpdate(variableData.name, variableData.id, optionsData, true);
} else {
onValueUpdate(variableData.name, variableData.id, value, false);
}
}
strategy.handleChange({
value,
variableData,
optionsData,
allAvailableOptionStrings: effectiveAllAvailableOptionStrings,
onValueUpdate,
});
},
[
variableData.multiSelect,
variableData.selectedValue,
variableData.name,
variableData.id,
variableData.showALLOption,
onValueUpdate,
variableData,
optionsData,
effectiveAllAvailableOptionStrings,
onValueUpdate,
strategy,
],
);
@@ -99,79 +134,96 @@ export function useDashboardVariableSelectHelper({
(inputValue: string | string[]): void => {
// Store the selection in temporary state while dropdown is open
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
setTempSelection(value);
setTempSelection(uniqueValues(value));
},
[variableData.multiSelect],
);
// Apply default value on first render if no selection exists
const finalSelectedValues = useMemo(() => {
if (variableData.multiSelect) {
let value = tempSelection || selectedValue;
if (isEmpty(value)) {
if (variableData.showALLOption) {
if (variableData.defaultValue) {
value = variableData.defaultValue;
} else {
value = optionsData;
}
} else if (variableData.defaultValue) {
value = variableData.defaultValue;
} else {
value = optionsData?.[0];
// Single select onChange: apply default if value is empty
const handleSingleSelectChange = useCallback(
(inputValue: string | string[]): void => {
if (isEmpty(inputValue)) {
if (defaultValue !== undefined) {
handleChange(defaultValue as string | string[]);
}
return;
}
return value;
}
if (isEmpty(selectedValue)) {
if (variableData.defaultValue) {
return variableData.defaultValue;
}
return optionsData[0]?.toString();
}
return selectedValue;
}, [
variableData.multiSelect,
variableData.showALLOption,
variableData.defaultValue,
selectedValue,
tempSelection,
optionsData,
]);
// Apply default values when needed
useEffect(() => {
if (
(variableData.multiSelect && !(tempSelection || selectValue)) ||
isEmpty(selectValue)
) {
handleChange(finalSelectedValues as string[] | string);
}
}, [
finalSelectedValues,
handleChange,
selectValue,
tempSelection,
variableData.multiSelect,
]);
handleChange(inputValue);
},
[handleChange, defaultValue],
);
// Handle dropdown visibility changes
const onDropdownVisibleChange = useCallback(
(visible: boolean): void => {
// Initialize temp selection when opening dropdown
if (visible) {
setTempSelection(getSelectValue(variableData.selectedValue, variableData));
if (variableData.allSelected && enableSelectAll) {
// When ALL is selected, show all available options as individually checked
setTempSelection([...effectiveAllAvailableOptionStrings]);
} else {
setTempSelection(getSelectValue(variableData.selectedValue, variableData));
}
}
// Apply changes when closing dropdown
else if (!visible && tempSelection !== undefined) {
// Call handleChange with the temporarily stored selection
handleChange(tempSelection);
// If ALL was selected before AND all options remain selected, skip updating
const wasAllSelected = enableSelectAll && variableData.allSelected;
const isAllSelectedAfter =
enableSelectAll &&
Array.isArray(tempSelection) &&
tempSelection.length === effectiveAllAvailableOptionStrings.length &&
effectiveAllAvailableOptionStrings.every((v) => tempSelection.includes(v));
if (wasAllSelected && isAllSelectedAfter) {
setTempSelection(undefined);
return;
}
// Apply default if closing with empty selection
let valueToApply = tempSelection;
if (isEmpty(tempSelection) && defaultValue !== undefined) {
valueToApply = defaultValue as string | string[];
}
// Order-agnostic change detection
const currentValue = variableData.selectedValue;
const hasChanged =
valueToApply !== currentValue &&
!(
Array.isArray(valueToApply) &&
Array.isArray(currentValue) &&
areArraysEqualIgnoreOrder(valueToApply, currentValue)
);
if (hasChanged) {
handleChange(valueToApply);
}
setTempSelection(undefined);
}
},
[variableData, tempSelection, handleChange],
[
variableData,
enableSelectAll,
effectiveAllAvailableOptionStrings,
tempSelection,
handleChange,
defaultValue,
],
);
// Explicit function for callers to apply default on mount / data load
// Pass overrideOptions when freshly-loaded options aren't in state yet (async callers)
const applyDefaultIfNeeded = useCallback(
(overrideOptions?: (string | number | boolean)[]): void => {
if (isEmpty(selectValue)) {
const defaultValueFromOptions = getDefaultValue(overrideOptions);
if (defaultValueFromOptions !== undefined) {
handleChange(defaultValueFromOptions as string | string[]);
}
}
},
[selectValue, handleChange, getDefaultValue],
);
const handleClear = useCallback((): void => {
@@ -182,11 +234,9 @@ export function useDashboardVariableSelectHelper({
? tempSelection || selectValue
: selectValue;
const defaultValue = variableData.defaultValue || selectValue;
const onChange = useMemo(() => {
return variableData.multiSelect ? handleTempChange : handleChange;
}, [variableData.multiSelect, handleTempChange, handleChange]);
return variableData.multiSelect ? handleTempChange : handleSingleSelectChange;
}, [variableData.multiSelect, handleTempChange, handleSingleSelectChange]);
return {
tempSelection,
@@ -197,5 +247,6 @@ export function useDashboardVariableSelectHelper({
value,
defaultValue,
onChange,
applyDefaultIfNeeded,
};
}
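
A minimal sketch of how a variable input can wire the reworked hook, assuming the surrounding component shape; the wrapper name, import paths and option values below are hypothetical, while the hook parameters, its return values and the queryVariableSelectStrategy export (defined further down in this compare) come from the code above:

import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper'; // illustrative path
import { queryVariableSelectStrategy } from './variableSelectStrategy/queryVariableSelectStrategy'; // illustrative path

type OnValueUpdate = (
  name: string,
  id: string,
  value: IDashboardVariable['selectedValue'],
  allSelected: boolean,
  haveCustomValuesSelected?: boolean,
) => void;

// Hypothetical wrapper hook showing the intended wiring.
function useQueryVariableSelect(
  variableData: IDashboardVariable,
  optionsData: (string | number | boolean)[],
  onValueUpdate: OnValueUpdate,
) {
  const helper = useDashboardVariableSelectHelper({
    variableData,
    optionsData,
    onValueUpdate,
    strategy: queryVariableSelectStrategy,
  });

  // When freshly fetched options have not been committed to state yet,
  // pass them explicitly so a default can still be applied on first load.
  const onOptionsFetched = (
    newOptionsData: (string | number | boolean)[],
  ): void => {
    helper.applyDefaultIfNeeded(newOptionsData);
  };

  return { ...helper, onOptionsFetched };
}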


@@ -363,25 +363,6 @@ export const uniqueOptions = (options: OptionData[]): OptionData[] => {
return uniqueOptions;
};
export const uniqueValues = (values: string[] | string): string[] | string => {
if (Array.isArray(values)) {
const uniqueValues: string[] = [];
const seenValues = new Set<string>();
values.forEach((value) => {
if (seenValues.has(value)) {
return;
}
seenValues.add(value);
uniqueValues.push(value);
});
return uniqueValues;
}
return values;
};
export const getSelectValue = (
selectedValue: IDashboardVariable['selectedValue'],
variableData: IDashboardVariable,


@@ -0,0 +1,4 @@
import { defaultVariableSelectStrategy } from './defaultVariableSelectStrategy';
import { VariableSelectStrategy } from './variableSelectStrategyTypes';
export const customVariableSelectStrategy: VariableSelectStrategy = defaultVariableSelectStrategy;


@@ -0,0 +1,20 @@
import { VariableSelectStrategy } from './variableSelectStrategyTypes';
export const defaultVariableSelectStrategy: VariableSelectStrategy = {
handleChange({ value, variableData, optionsData, onValueUpdate }) {
if (!variableData.name) {
return;
}
const isAllSelected =
Array.isArray(value) &&
value.length > 0 &&
optionsData.every((option) => value.includes(option.toString()));
if (isAllSelected && variableData.showALLOption) {
onValueUpdate(variableData.name, variableData.id, optionsData, true);
} else {
onValueUpdate(variableData.name, variableData.id, value, false);
}
},
};
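
To make the ALL detection in this default strategy concrete, a small illustrative invocation (the variable, option values and logging callback are hypothetical):

import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { defaultVariableSelectStrategy } from './defaultVariableSelectStrategy';

// The user has ticked every available option, so the strategy reports
// allSelected=true and forwards the full optionsData array.
defaultVariableSelectStrategy.handleChange({
  value: ['frontend', 'backend'],
  variableData: ({
    name: 'service',
    id: 'var1',
    showALLOption: true,
  } as unknown) as IDashboardVariable,
  optionsData: ['frontend', 'backend'],
  allAvailableOptionStrings: ['frontend', 'backend'],
  onValueUpdate: (name, id, value, allSelected): void => {
    // name='service', id='var1', value=['frontend', 'backend'], allSelected=true
  },
});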


@@ -0,0 +1,37 @@
import { ALL_SELECT_VALUE } from 'container/DashboardContainer/utils';
import { VariableSelectStrategy } from './variableSelectStrategyTypes';
export const dynamicVariableSelectStrategy: VariableSelectStrategy = {
handleChange({
value,
variableData,
allAvailableOptionStrings,
onValueUpdate,
}) {
if (!variableData.name) {
return;
}
if (
value === ALL_SELECT_VALUE ||
(Array.isArray(value) && value.includes(ALL_SELECT_VALUE))
) {
onValueUpdate(variableData.name, variableData.id, null, true);
} else {
// For ALL selection in dynamic variables, pass null to avoid storing values
// The parent component will handle this appropriately
const haveCustomValuesSelected =
Array.isArray(value) &&
!value.every((v) => allAvailableOptionStrings.includes(v.toString()));
onValueUpdate(
variableData.name,
variableData.id,
value,
allAvailableOptionStrings.every((v) => value.includes(v.toString())),
haveCustomValuesSelected,
);
}
},
};
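
A short illustration of the two branches of the dynamic strategy above (variable names and values are hypothetical):

import { ALL_SELECT_VALUE } from 'container/DashboardContainer/utils';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { dynamicVariableSelectStrategy } from './dynamicVariableSelectStrategy';

const variableData = ({ name: 'services', id: 'var21' } as unknown) as IDashboardVariable;

// 1) The ALL sentinel: no concrete values are stored, only the allSelected flag.
dynamicVariableSelectStrategy.handleChange({
  value: ALL_SELECT_VALUE,
  variableData,
  optionsData: ['frontend', 'backend'],
  allAvailableOptionStrings: ['frontend', 'backend'],
  onValueUpdate: (name, id, value, allSelected): void => {
    // value=null, allSelected=true
  },
});

// 2) A typed-in value that is not among the fetched options is flagged as custom.
dynamicVariableSelectStrategy.handleChange({
  value: ['frontend', 'checkout-canary'],
  variableData,
  optionsData: ['frontend', 'backend'],
  allAvailableOptionStrings: ['frontend', 'backend'],
  onValueUpdate: (name, id, value, allSelected, haveCustomValuesSelected): void => {
    // value=['frontend', 'checkout-canary'], allSelected=false, haveCustomValuesSelected=true
  },
});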


@@ -0,0 +1,4 @@
import { defaultVariableSelectStrategy } from './defaultVariableSelectStrategy';
import { VariableSelectStrategy } from './variableSelectStrategyTypes';
export const queryVariableSelectStrategy: VariableSelectStrategy = defaultVariableSelectStrategy;


@@ -0,0 +1,17 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';
export interface VariableSelectStrategy {
handleChange(params: {
value: string | string[];
variableData: IDashboardVariable;
optionsData: (string | number | boolean)[];
allAvailableOptionStrings: string[];
onValueUpdate: (
name: string,
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
haveCustomValuesSelected?: boolean,
) => void;
}): void;
}


@@ -0,0 +1,32 @@
import { isEqual } from 'lodash-es';
export const areArraysEqualIgnoreOrder = (
a: (string | number | boolean)[],
b: (string | number | boolean)[],
): boolean => {
if (a.length !== b.length) {
return false;
}
const sortedA = [...a].sort();
const sortedB = [...b].sort();
return isEqual(sortedA, sortedB);
};
export const uniqueValues = (values: string[] | string): string[] | string => {
if (Array.isArray(values)) {
const uniqueValues: string[] = [];
const seenValues = new Set<string>();
values.forEach((value) => {
if (seenValues.has(value)) {
return;
}
seenValues.add(value);
uniqueValues.push(value);
});
return uniqueValues;
}
return values;
};
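
For quick reference, the expected behaviour of the two helpers above (illustrative inputs):

areArraysEqualIgnoreOrder(['a', 'b', 'c'], ['c', 'a', 'b']); // true: same members, different order
areArraysEqualIgnoreOrder(['a', 'b'], ['a', 'b', 'b']); // false: lengths differ
areArraysEqualIgnoreOrder([1, 2], [1, 3]); // false: members differ

uniqueValues(['a', 'b', 'a', 'c', 'b']); // ['a', 'b', 'c']: first occurrence wins, order preserved
uniqueValues('a'); // 'a': non-array input is returned as-is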


@@ -2,7 +2,7 @@
/* eslint-disable sonarjs/no-duplicate-string */
/* eslint-disable react/jsx-props-no-spreading */
import React from 'react';
import { QueryClient, QueryClientProvider } from 'react-query';
import { QueryClient, QueryClientProvider, useQuery } from 'react-query';
import * as ReactRedux from 'react-redux';
import {
act,
@@ -15,13 +15,22 @@ import {
import { getFieldValues } from 'api/dynamicVariables/getFieldValues';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import DynamicVariableSelection from '../DashboardVariablesSelection/DynamicVariableSelection';
import DynamicVariableInput from '../DashboardVariablesSelection/DynamicVariableInput';
// Mock the getFieldValues API
jest.mock('api/dynamicVariables/getFieldValues', () => ({
getFieldValues: jest.fn(),
}));
// Mock useQuery from react-query
jest.mock('react-query', () => {
const originalModule = jest.requireActual('react-query');
return {
...originalModule,
useQuery: jest.fn(),
};
});
describe('Dynamic Variable Default Behavior', () => {
const mockOnValueUpdate = jest.fn();
const mockApiResponse = {
@@ -59,6 +68,46 @@ describe('Dynamic Variable Default Behavior', () => {
// Mock getFieldValues API to return our test data
(getFieldValues as jest.Mock).mockResolvedValue(mockApiResponse);
// Mock useQuery implementation to avoid infinite re-renders
// and ensure onSuccess is called once
(useQuery as jest.Mock).mockImplementation((key, options) => {
const { onSuccess, enabled, queryFn } = options || {};
const variableName = key[1];
const dynamicVarsKey = key[2];
React.useEffect(() => {
if (enabled !== false) {
if (onSuccess) {
// For 'services' tests:
// 1. "Default to ALL" expectations imply empty options -> [] behavior. This happens when selectedValue is undefined (dynamicVarsKey has 'null').
// 2. "ALL Option Special Value" needs full options to render the "ALL" item in dropdown. This happens when selectedValue is defined.
if (
variableName === 'services' &&
typeof dynamicVarsKey === 'string' &&
dynamicVarsKey.includes('null')
) {
onSuccess({
...mockApiResponse,
data: { ...mockApiResponse.data, normalizedValues: [] },
});
} else {
onSuccess(mockApiResponse);
}
}
if (queryFn) {
queryFn();
}
}
}, [enabled, variableName, dynamicVarsKey]); // Only depend on enabled/keys
return {
isLoading: false,
isError: false,
data: mockApiResponse,
refetch: jest.fn(),
};
});
jest.spyOn(ReactRedux, 'useSelector').mockReturnValue({
minTime: '2023-01-01T00:00:00Z',
maxTime: '2023-01-02T00:00:00Z',
@@ -84,7 +133,7 @@ describe('Dynamic Variable Default Behavior', () => {
await act(async () => {
renderWithQueryClient(
<DynamicVariableSelection
<DynamicVariableInput
variableData={variableData}
existingVariables={{ var1: variableData }}
onValueUpdate={mockOnValueUpdate}
@@ -120,7 +169,7 @@ describe('Dynamic Variable Default Behavior', () => {
await act(async () => {
renderWithQueryClient(
<DynamicVariableSelection
<DynamicVariableInput
variableData={variableData}
existingVariables={{ var1: variableData }}
onValueUpdate={mockOnValueUpdate}
@@ -164,7 +213,7 @@ describe('Dynamic Variable Default Behavior', () => {
await act(async () => {
renderWithQueryClient(
<DynamicVariableSelection
<DynamicVariableInput
variableData={variableData}
existingVariables={{ var1: variableData }}
onValueUpdate={mockOnValueUpdate}
@@ -172,9 +221,6 @@ describe('Dynamic Variable Default Behavior', () => {
);
});
// Component should render without errors
expect(screen.getByText('$service')).toBeInTheDocument();
// Check if the dropdown is present
const selectElement = screen.getByRole('combobox');
expect(selectElement).toBeInTheDocument();
@@ -232,7 +278,7 @@ describe('Dynamic Variable Default Behavior', () => {
await act(async () => {
renderWithQueryClient(
<DynamicVariableSelection
<DynamicVariableInput
variableData={variableData}
existingVariables={{ var1: variableData }}
onValueUpdate={mockOnValueUpdate}
@@ -267,7 +313,7 @@ describe('Dynamic Variable Default Behavior', () => {
await act(async () => {
renderWithQueryClient(
<DynamicVariableSelection
<DynamicVariableInput
variableData={variableData}
existingVariables={{ var1: variableData }}
onValueUpdate={mockOnValueUpdate}
@@ -293,7 +339,7 @@ describe('Dynamic Variable Default Behavior', () => {
expect(screen.queryByText('backend')).not.toBeInTheDocument();
});
it('should default to ALL when no default and no previous selection', async () => {
it('sahould default to ALL when no default and no previous selection', async () => {
const variableData: IDashboardVariable = {
id: 'var21',
name: 'services',
@@ -311,7 +357,7 @@ describe('Dynamic Variable Default Behavior', () => {
await act(async () => {
renderWithQueryClient(
<DynamicVariableSelection
<DynamicVariableInput
variableData={variableData}
existingVariables={{ var1: variableData }}
onValueUpdate={mockOnValueUpdate}
@@ -345,7 +391,7 @@ describe('Dynamic Variable Default Behavior', () => {
expect(mockOnValueUpdate).toHaveBeenCalledWith(
'services',
'var21',
[], // Empty array when allSelected is true
[],
true, // allSelected = true
false,
);
@@ -371,7 +417,7 @@ describe('Dynamic Variable Default Behavior', () => {
await act(async () => {
renderWithQueryClient(
<DynamicVariableSelection
<DynamicVariableInput
variableData={variableData}
existingVariables={{ var1: variableData }}
onValueUpdate={mockOnValueUpdate}
@@ -408,7 +454,7 @@ describe('Dynamic Variable Default Behavior', () => {
await act(async () => {
renderWithQueryClient(
<DynamicVariableSelection
<DynamicVariableInput
variableData={variableData}
existingVariables={{ var1: variableData }}
onValueUpdate={mockOnValueUpdate}
@@ -416,9 +462,6 @@ describe('Dynamic Variable Default Behavior', () => {
);
});
// Component should render without errors
expect(screen.getByText('$services')).toBeInTheDocument();
// Check if ALL is displayed in the UI (in the main selection area)
const allTextElement = screen.getByText('ALL');
expect(allTextElement).toBeInTheDocument();


@@ -0,0 +1,412 @@
import type { ReactNode } from 'react';
import { render, screen } from '@testing-library/react';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import type { AlignedData } from 'uplot';
import { PlotContextProvider } from '../../context/PlotContext';
import UPlotChart from '../UPlotChart';
// ---------------------------------------------------------------------------
// Mocks
// ---------------------------------------------------------------------------
jest.mock(
'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils',
() => ({
getStoredSeriesVisibility: jest.fn(),
updateSeriesVisibilityToLocalStorage: jest.fn(),
}),
);
jest.mock('@sentry/react', () => ({
ErrorBoundary: ({ children }: { children: ReactNode }): JSX.Element => (
<>{children}</>
),
}));
jest.mock('pages/ErrorBoundaryFallback/ErrorBoundaryFallback', () => ({
__esModule: true,
default: (): JSX.Element => <div>Error Fallback</div>,
}));
interface MockUPlotInstance {
root: HTMLDivElement;
setData: jest.Mock;
setSize: jest.Mock;
destroy: jest.Mock;
}
let instances: MockUPlotInstance[] = [];
const mockUPlotConstructor = jest.fn();
jest.mock('uplot', () => {
function MockUPlot(
opts: Record<string, unknown>,
data: unknown,
target: HTMLElement,
): MockUPlotInstance {
mockUPlotConstructor(opts, data, target);
const rootEl = document.createElement('div');
target.appendChild(rootEl);
const inst: MockUPlotInstance = {
root: rootEl,
setData: jest.fn(),
setSize: jest.fn(),
destroy: jest.fn(),
};
instances.push(inst);
return inst;
}
MockUPlot.paths = {
spline: jest.fn(() => jest.fn()),
bars: jest.fn(() => jest.fn()),
linear: jest.fn(() => jest.fn()),
stepped: jest.fn(() => jest.fn()),
};
MockUPlot.tzDate = jest.fn();
return { __esModule: true, default: MockUPlot };
});
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
const createMockConfig = (): UPlotConfigBuilder => {
return ({
getConfig: jest.fn().mockReturnValue({
series: [{ value: (): string => '' }],
axes: [],
scales: {},
hooks: {},
cursor: {},
}),
getWidgetId: jest.fn().mockReturnValue(undefined),
getShouldSaveSelectionPreference: jest.fn().mockReturnValue(false),
} as unknown) as UPlotConfigBuilder;
};
const validData: AlignedData = [
[1, 2, 3],
[10, 20, 30],
];
const emptyData: AlignedData = [[]];
const Wrapper = ({ children }: { children: ReactNode }): JSX.Element => (
<PlotContextProvider>{children}</PlotContextProvider>
);
// ---------------------------------------------------------------------------
// Tests
// ---------------------------------------------------------------------------
describe('UPlotChart', () => {
beforeEach(() => {
instances = [];
mockUPlotConstructor.mockClear();
});
describe('when data is empty', () => {
it('displays "No Data" message instead of the chart container', () => {
render(
<UPlotChart
config={createMockConfig()}
data={emptyData}
width={600}
height={400}
/>,
{ wrapper: Wrapper },
);
expect(screen.getByText('No Data')).toBeInTheDocument();
expect(screen.queryByTestId('uplot-main-div')).not.toBeInTheDocument();
});
it('sizes the empty-state container to the given width and height', () => {
render(
<UPlotChart
config={createMockConfig()}
data={emptyData}
width={750}
height={350}
/>,
{ wrapper: Wrapper },
);
const noDataContainer = screen
.getByText('No Data')
.closest('.uplot-no-data');
expect(noDataContainer).toHaveStyle({ width: '750px', height: '350px' });
});
it('does not create a uPlot instance', () => {
render(
<UPlotChart
config={createMockConfig()}
data={emptyData}
width={600}
height={400}
/>,
{ wrapper: Wrapper },
);
expect(mockUPlotConstructor).not.toHaveBeenCalled();
});
});
describe('chart container', () => {
it('renders children inside the chart wrapper', () => {
render(
<UPlotChart
config={createMockConfig()}
data={validData}
width={600}
height={400}
>
<div data-testid="tooltip-plugin">Tooltip</div>
</UPlotChart>,
{ wrapper: Wrapper },
);
expect(screen.getByTestId('tooltip-plugin')).toBeInTheDocument();
});
});
describe('plot creation', () => {
it('instantiates uPlot with floored dimensions and the container element', () => {
render(
<UPlotChart
config={createMockConfig()}
data={validData}
width={600.9}
height={400.2}
/>,
{ wrapper: Wrapper },
);
expect(mockUPlotConstructor).toHaveBeenCalledTimes(1);
const [opts, data, target] = mockUPlotConstructor.mock.calls[0];
expect(opts.width).toBe(600);
expect(opts.height).toBe(400);
expect(data).toBe(validData);
expect(target).toBe(screen.getByTestId('uplot-main-div'));
});
it('merges config builder output into the uPlot options', () => {
const config = createMockConfig();
config.getConfig = jest.fn().mockReturnValue({
series: [{ value: (): string => '' }],
axes: [{ scale: 'y' }],
scales: { y: {} },
hooks: {},
cursor: { show: true },
});
render(
<UPlotChart
config={(config as unknown) as UPlotConfigBuilder}
data={validData}
width={500}
height={300}
/>,
{ wrapper: Wrapper },
);
const [opts] = mockUPlotConstructor.mock.calls[0];
expect(opts.width).toBe(500);
expect(opts.height).toBe(300);
expect(opts.axes).toEqual([{ scale: 'y' }]);
expect(opts.cursor).toEqual({ show: true });
});
it('skips creation when width or height is 0', () => {
render(
<UPlotChart
config={createMockConfig()}
data={validData}
width={0}
height={0}
/>,
{ wrapper: Wrapper },
);
expect(mockUPlotConstructor).not.toHaveBeenCalled();
});
});
describe('lifecycle callbacks', () => {
it('invokes plotRef with the uPlot instance after creation', () => {
const plotRef = jest.fn();
render(
<UPlotChart
config={createMockConfig()}
data={validData}
width={600}
height={400}
plotRef={plotRef}
/>,
{ wrapper: Wrapper },
);
expect(plotRef).toHaveBeenCalledTimes(1);
expect(plotRef).toHaveBeenCalledWith(instances[0]);
});
it('destroys the instance and notifies callbacks when data becomes empty', () => {
const plotRef = jest.fn();
const onDestroy = jest.fn();
const config = createMockConfig();
const { rerender } = render(
<UPlotChart
config={config}
data={validData}
width={600}
height={400}
plotRef={plotRef}
onDestroy={onDestroy}
/>,
{ wrapper: Wrapper },
);
const firstInstance = instances[0];
plotRef.mockClear();
rerender(
<UPlotChart
config={config}
data={emptyData}
width={600}
height={400}
plotRef={plotRef}
onDestroy={onDestroy}
/>,
);
expect(onDestroy).toHaveBeenCalledWith(firstInstance);
expect(firstInstance.destroy).toHaveBeenCalled();
expect(plotRef).toHaveBeenCalledWith(null);
expect(screen.getByText('No Data')).toBeInTheDocument();
});
it('destroys the previous instance before creating a new one on config change', () => {
const onDestroy = jest.fn();
const config1 = createMockConfig();
const config2 = createMockConfig();
const { rerender } = render(
<UPlotChart
config={config1}
data={validData}
width={600}
height={400}
onDestroy={onDestroy}
/>,
{ wrapper: Wrapper },
);
const firstInstance = instances[0];
rerender(
<UPlotChart
config={config2}
data={validData}
width={600}
height={400}
onDestroy={onDestroy}
/>,
);
expect(onDestroy).toHaveBeenCalledWith(firstInstance);
expect(firstInstance.destroy).toHaveBeenCalled();
expect(instances).toHaveLength(2);
});
});
describe('prop updates', () => {
it('calls setData without recreating the plot when only data changes', () => {
const config = createMockConfig();
const newData: AlignedData = [
[4, 5, 6],
[40, 50, 60],
];
const { rerender } = render(
<UPlotChart config={config} data={validData} width={600} height={400} />,
{ wrapper: Wrapper },
);
const inst = instances[0];
rerender(
<UPlotChart config={config} data={newData} width={600} height={400} />,
);
expect(inst.setData).toHaveBeenCalledWith(newData);
expect(mockUPlotConstructor).toHaveBeenCalledTimes(1);
});
it('calls setSize with floored values when only dimensions change', () => {
const config = createMockConfig();
const { rerender } = render(
<UPlotChart config={config} data={validData} width={600} height={400} />,
{ wrapper: Wrapper },
);
const instance = instances[0];
rerender(
<UPlotChart
config={config}
data={validData}
width={800.7}
height={500.3}
/>,
);
expect(instance.setSize).toHaveBeenCalledWith({ width: 800, height: 500 });
expect(mockUPlotConstructor).toHaveBeenCalledTimes(1);
});
it('recreates the plot when config changes', () => {
const config1 = createMockConfig();
const config2 = createMockConfig();
const { rerender } = render(
<UPlotChart config={config1} data={validData} width={600} height={400} />,
{ wrapper: Wrapper },
);
rerender(
<UPlotChart config={config2} data={validData} width={600} height={400} />,
);
expect(mockUPlotConstructor).toHaveBeenCalledTimes(2);
});
it('does nothing when all props remain the same', () => {
const config = createMockConfig();
const { rerender } = render(
<UPlotChart config={config} data={validData} width={600} height={400} />,
{ wrapper: Wrapper },
);
const instance = instances[0];
rerender(
<UPlotChart config={config} data={validData} width={600} height={400} />,
);
expect(mockUPlotConstructor).toHaveBeenCalledTimes(1);
expect(instance.setData).not.toHaveBeenCalled();
expect(instance.setSize).not.toHaveBeenCalled();
});
});
});
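
The update behaviour pinned down by these tests can be summarised as a small decision function; this is an inference from the expectations above, not the component's actual source:

import type { AlignedData } from 'uplot';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';

interface ChartProps {
  config: UPlotConfigBuilder;
  data: AlignedData;
  width: number;
  height: number;
}

type UpdateKind = 'none' | 'setData' | 'setSize' | 'recreate' | 'destroy';

function classifyUpdate(prev: ChartProps, next: ChartProps): UpdateKind {
  // Assumption: "empty" means no series values, as with the [[]] fixture above.
  const isEmpty = (d: AlignedData): boolean => d.length === 0 || d[0].length === 0;
  if (isEmpty(next.data)) return 'destroy'; // destroy + plotRef(null) + "No Data" state
  if (prev.config !== next.config) return 'recreate'; // destroy old instance, build a new one
  if (prev.data !== next.data) return 'setData'; // reuse instance, push new data
  if (prev.width !== next.width || prev.height !== next.height) {
    return 'setSize'; // reuse instance; dimensions are floored before applying
  }
  return 'none';
}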


@@ -0,0 +1,62 @@
import { isInvalidPlotValue, normalizePlotValue } from '../dataUtils';
describe('dataUtils', () => {
describe('isInvalidPlotValue', () => {
it('treats null and undefined as invalid', () => {
expect(isInvalidPlotValue(null)).toBe(true);
expect(isInvalidPlotValue(undefined)).toBe(true);
});
it('treats finite numbers as valid and non-finite as invalid', () => {
expect(isInvalidPlotValue(0)).toBe(false);
expect(isInvalidPlotValue(123.45)).toBe(false);
expect(isInvalidPlotValue(Number.NaN)).toBe(true);
expect(isInvalidPlotValue(Infinity)).toBe(true);
expect(isInvalidPlotValue(-Infinity)).toBe(true);
});
it('treats well-formed numeric strings as valid', () => {
expect(isInvalidPlotValue('0')).toBe(false);
expect(isInvalidPlotValue('123.45')).toBe(false);
expect(isInvalidPlotValue('-1')).toBe(false);
});
it('treats Infinity/NaN string variants and non-numeric strings as invalid', () => {
expect(isInvalidPlotValue('+Inf')).toBe(true);
expect(isInvalidPlotValue('-Inf')).toBe(true);
expect(isInvalidPlotValue('Infinity')).toBe(true);
expect(isInvalidPlotValue('-Infinity')).toBe(true);
expect(isInvalidPlotValue('NaN')).toBe(true);
expect(isInvalidPlotValue('not-a-number')).toBe(true);
});
it('treats non-number, non-string values as valid (left to caller)', () => {
expect(isInvalidPlotValue({})).toBe(false);
expect(isInvalidPlotValue([])).toBe(false);
expect(isInvalidPlotValue(true)).toBe(false);
});
});
describe('normalizePlotValue', () => {
it('returns null for invalid values detected by isInvalidPlotValue', () => {
expect(normalizePlotValue(null)).toBeNull();
expect(normalizePlotValue(undefined)).toBeNull();
expect(normalizePlotValue(NaN)).toBeNull();
expect(normalizePlotValue(Infinity)).toBeNull();
expect(normalizePlotValue('-Infinity')).toBeNull();
expect(normalizePlotValue('not-a-number')).toBeNull();
});
it('parses valid numeric strings into numbers', () => {
expect(normalizePlotValue('0')).toBe(0);
expect(normalizePlotValue('123.45')).toBe(123.45);
expect(normalizePlotValue('-1')).toBe(-1);
});
it('passes through valid numbers unchanged', () => {
expect(normalizePlotValue(0)).toBe(0);
expect(normalizePlotValue(123)).toBe(123);
expect(normalizePlotValue(42.5)).toBe(42.5);
});
});
});
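
The tests above pin the contract down fairly completely. A sketch of an implementation that would satisfy them, written as an assumption for illustration; the real dataUtils module under test may differ:

// Assumed sketch, derived only from the expectations in this test file.
export function isInvalidPlotValue(value: unknown): boolean {
  if (value === null || value === undefined) {
    return true;
  }
  if (typeof value === 'number') {
    return !Number.isFinite(value);
  }
  if (typeof value === 'string') {
    // 'NaN', '+Inf', '-Infinity' and non-numeric strings all parse to NaN or +/-Infinity.
    const parsed = Number(value);
    return !Number.isFinite(parsed);
  }
  // Objects, arrays and booleans are left to the caller.
  return false;
}

export function normalizePlotValue(value: unknown): number | null {
  if (isInvalidPlotValue(value)) {
    return null;
  }
  if (typeof value === 'string') {
    return Number(value);
  }
  if (typeof value === 'number') {
    return value;
  }
  // Behaviour for other "valid" values (objects, booleans) is not covered by
  // the tests above; returning null here is an assumption.
  return null;
}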


@@ -0,0 +1,201 @@
import uPlot from 'uplot';
import { DistributionType } from '../../config/types';
import * as scaleUtils from '../scale';
describe('scale utils', () => {
describe('normalizeLogScaleLimits', () => {
it('returns limits unchanged when distribution is not logarithmic', () => {
const limits = {
min: 1,
max: 100,
softMin: 5,
softMax: 50,
};
const result = scaleUtils.normalizeLogScaleLimits({
distr: DistributionType.Linear,
logBase: 10,
limits,
});
expect(result).toEqual(limits);
});
it('snaps positive limits to powers of the log base when distribution is logarithmic', () => {
const result = scaleUtils.normalizeLogScaleLimits({
distr: DistributionType.Logarithmic,
logBase: 10,
limits: {
min: 3,
max: 900,
softMin: 12,
softMax: 85,
},
});
expect(result.min).toBe(1); // 10^0
expect(result.max).toBe(1000); // 10^3
expect(result.softMin).toBe(10); // 10^1
expect(result.softMax).toBe(100); // 10^2
});
});
describe('getDistributionConfig', () => {
it('returns empty config for time scales', () => {
const config = scaleUtils.getDistributionConfig({
time: true,
distr: DistributionType.Linear,
logBase: 2,
});
expect(config).toEqual({});
});
it('returns linear distribution settings for non-time scales', () => {
const config = scaleUtils.getDistributionConfig({
time: false,
distr: DistributionType.Linear,
logBase: 2,
});
expect(config.distr).toBe(1);
expect(config.log).toBe(2);
});
it('returns log distribution settings for non-time scales', () => {
const config = scaleUtils.getDistributionConfig({
time: false,
distr: DistributionType.Logarithmic,
logBase: 10,
});
expect(config.distr).toBe(3);
expect(config.log).toBe(10);
});
});
describe('getRangeConfig', () => {
it('computes range config and fixed range flags correctly', () => {
const {
rangeConfig,
hardMinOnly,
hardMaxOnly,
hasFixedRange,
} = scaleUtils.getRangeConfig(0, 100, null, null, 0.1, 0.2);
expect(rangeConfig.min).toEqual({
pad: 0.1,
hard: 0,
soft: undefined,
mode: 3,
});
expect(rangeConfig.max).toEqual({
pad: 0.2,
hard: 100,
soft: undefined,
mode: 3,
});
expect(hardMinOnly).toBe(true);
expect(hardMaxOnly).toBe(true);
expect(hasFixedRange).toBe(true);
});
});
describe('createRangeFunction', () => {
it('returns [dataMin, dataMax] when no fixed range and no data', () => {
const params = {
rangeConfig: {} as uPlot.Range.Config,
hardMinOnly: false,
hardMaxOnly: false,
hasFixedRange: false,
min: null,
max: null,
};
const rangeFn = scaleUtils.createRangeFunction(params);
const u = ({
scales: {
y: {
distr: 1,
log: 10,
},
},
} as unknown) as uPlot;
const result = rangeFn(
u,
(null as unknown) as number,
(null as unknown) as number,
'y',
);
expect(result).toEqual([null, null]);
});
it('applies hard min/max for linear scale when only hard limits are set', () => {
const params = {
rangeConfig: {} as uPlot.Range.Config,
hardMinOnly: true,
hardMaxOnly: true,
hasFixedRange: true,
min: 0,
max: 100,
};
const rangeFn = scaleUtils.createRangeFunction(params);
// Use an undefined distr so the range function skips calling uPlot.rangeNum
// and we can focus on the behavior of applyHardLimits.
const u = ({
scales: {
y: {
distr: undefined,
log: 10,
},
},
} as unknown) as uPlot;
const result = rangeFn(u, 10, 20, 'y');
// After applyHardLimits, the returned range should respect configured min/max
expect(result).toEqual([0, 100]);
});
});
describe('adjustSoftLimitsWithThresholds', () => {
it('returns original soft limits when there are no thresholds', () => {
const result = scaleUtils.adjustSoftLimitsWithThresholds(1, 5, [], 'ms');
expect(result).toEqual({ softMin: 1, softMax: 5 });
});
it('expands soft limits to include threshold min/max values', () => {
const result = scaleUtils.adjustSoftLimitsWithThresholds(
3,
6,
[{ thresholdValue: 2 }, { thresholdValue: 8 }],
'ms',
);
// min should be pulled down to the smallest threshold value
expect(result.softMin).toBe(2);
// max should be pushed up to the largest threshold value
expect(result.softMax).toBe(8);
});
});
describe('getFallbackMinMaxTimeStamp', () => {
it('returns a 24-hour window ending at approximately now', () => {
const { fallbackMin, fallbackMax } = scaleUtils.getFallbackMinMaxTimeStamp();
// Difference should be exactly one day in seconds
expect(fallbackMax - fallbackMin).toBe(86400);
// Both should be reasonable timestamps (not NaN or negative)
expect(fallbackMin).toBeGreaterThan(0);
expect(fallbackMax).toBeGreaterThan(fallbackMin);
});
});
});
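
The snapping behaviour exercised in the normalizeLogScaleLimits test rounds lower limits down to a power of the log base and upper limits up; this is an inference from the expected values above, not a statement about the actual implementation:

// min / softMin: largest power of `base` that is <= v
const snapDown = (v: number, base: number): number =>
  base ** Math.floor(Math.log(v) / Math.log(base));

// max / softMax: smallest power of `base` that is >= v
const snapUp = (v: number, base: number): number =>
  base ** Math.ceil(Math.log(v) / Math.log(base));

snapDown(3, 10); // 1    (10^0)
snapUp(900, 10); // 1000 (10^3)
snapDown(12, 10); // 10   (10^1)
snapUp(85, 10); // 100  (10^2)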


@@ -0,0 +1,36 @@
import { findMinMaxThresholdValues } from '../threshold';
describe('findMinMaxThresholdValues', () => {
it('returns [null, null] when thresholds array is empty or missing', () => {
expect(findMinMaxThresholdValues([], 'ms')).toEqual([null, null]);
// @ts-expect-error intentional undefined to cover defensive branch
expect(findMinMaxThresholdValues(undefined, 'ms')).toEqual([null, null]);
});
it('returns min and max from thresholdValue when units are not provided', () => {
const thresholds = [
{ thresholdValue: 5 },
{ thresholdValue: 1 },
{ thresholdValue: 10 },
];
const [min, max] = findMinMaxThresholdValues(thresholds);
expect(min).toBe(1);
expect(max).toBe(10);
});
it('ignores thresholds without a value or with unconvertible units', () => {
const thresholds = [
// Should be ignored: convertValue returns null for unknown unit
{ thresholdValue: 100, thresholdUnit: 'unknown-unit' },
// Should be used
{ thresholdValue: 4 },
];
const [min, max] = findMinMaxThresholdValues(thresholds, 'ms');
expect(min).toBe(4);
expect(max).toBe(4);
});
});


@@ -1,44 +0,0 @@
package middleware
import (
"log/slog"
"net/http"
"runtime/debug"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
)
// Recovery is a middleware that recovers from panics, logs the panic,
// and returns a 500 Internal Server Error.
type Recovery struct {
logger *slog.Logger
}
// NewRecovery creates a new Recovery middleware.
func NewRecovery(logger *slog.Logger) Wrapper {
return &Recovery{
logger: logger.With("pkg", "http-middleware-recovery"),
}
}
// Wrap is the middleware handler.
func (m *Recovery) Wrap(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
defer func() {
if err := recover(); err != nil {
m.logger.ErrorContext(
r.Context(),
"panic recovered",
"err", err, "stack", string(debug.Stack()),
)
render.Error(w, errors.NewInternalf(
errors.CodeInternal, "internal server error",
))
}
}()
next.ServeHTTP(w, r)
})
}


@@ -13,7 +13,6 @@ import (
root "github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/gorilla/mux"
)
@@ -463,7 +462,7 @@ func (h *handler) UpdateAPIKey(w http.ResponseWriter, r *http.Request) {
return
}
if slices.Contains(integrationstypes.IntegrationUserEmails, createdByUser.Email) {
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email.String())) {
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "API Keys for integration users cannot be revoked"))
return
}
@@ -508,7 +507,7 @@ func (h *handler) RevokeAPIKey(w http.ResponseWriter, r *http.Request) {
return
}
if slices.Contains(integrationstypes.IntegrationUserEmails, createdByUser.Email) {
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email.String())) {
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "API Keys for integration users cannot be revoked"))
return
}


@@ -19,7 +19,6 @@ import (
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/emailtypes"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/dustin/go-humanize"
@@ -172,7 +171,7 @@ func (m *Module) DeleteInvite(ctx context.Context, orgID string, id valuer.UUID)
func (module *Module) CreateUser(ctx context.Context, input *types.User, opts ...root.CreateUserOption) error {
createUserOpts := root.NewCreateUserOptions(opts...)
// since assign is idempotent multiple calls to assign won't cause issues in case of retries.
// since assign is idempotant multiple calls to assign won't cause issues in case of retries.
err := module.authz.Grant(ctx, input.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role), authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
if err != nil {
return err
@@ -287,7 +286,7 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
return err
}
if slices.Contains(integrationstypes.IntegrationUserEmails, user.Email) {
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(user.Email.String())) {
return errors.New(errors.TypeForbidden, errors.CodeForbidden, "integration user cannot be deleted")
}
@@ -301,7 +300,7 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
return errors.New(errors.TypeForbidden, errors.CodeForbidden, "cannot delete the last admin")
}
// since revoke is idempotent multiple calls to revoke won't cause issues in case of retries
// since revoke is idempotant multiple calls to revoke won't cause issues in case of retries
err = module.authz.Revoke(ctx, orgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role), authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
if err != nil {
return err


@@ -830,7 +830,7 @@ func (r *ClickHouseReader) GetUsage(ctx context.Context, queryParams *model.GetU
func (r *ClickHouseReader) GetSpansForTrace(ctx context.Context, traceID string, traceDetailsQuery string) ([]model.SpanItemV2, *model.ApiError) {
var traceSummary model.TraceSummary
summaryQuery := fmt.Sprintf("SELECT * from %s.%s WHERE trace_id=$1", r.TraceDB, r.traceSummaryTable)
summaryQuery := fmt.Sprintf("SELECT * from %s.%s FINAL WHERE trace_id=$1", r.TraceDB, r.traceSummaryTable)
err := r.db.QueryRow(ctx, summaryQuery, traceID).Scan(&traceSummary.TraceID, &traceSummary.Start, &traceSummary.End, &traceSummary.NumSpans)
if err != nil {
if err == sql.ErrNoRows {
@@ -6458,7 +6458,7 @@ func (r *ClickHouseReader) SearchTraces(ctx context.Context, params *model.Searc
}
var traceSummary model.TraceSummary
summaryQuery := fmt.Sprintf("SELECT * from %s.%s WHERE trace_id=$1", r.TraceDB, r.traceSummaryTable)
summaryQuery := fmt.Sprintf("SELECT * from %s.%s FINAL WHERE trace_id=$1", r.TraceDB, r.traceSummaryTable)
err := r.db.QueryRow(ctx, summaryQuery, params.TraceID).Scan(&traceSummary.TraceID, &traceSummary.Start, &traceSummary.End, &traceSummary.NumSpans)
if err != nil {
if err == sql.ErrNoRows {


@@ -1,4 +1,4 @@
package store
package cloudintegrations
import (
"context"
@@ -7,50 +7,49 @@ import (
"strings"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
var (
CodeCloudIntegrationAccountNotFound errors.Code = errors.MustNewCode("cloud_integration_account_not_found")
)
type cloudProviderAccountsRepository interface {
listConnected(ctx context.Context, orgId string, provider string) ([]types.CloudIntegration, *model.ApiError)
type CloudProviderAccountsRepository interface {
ListConnected(ctx context.Context, orgId string, provider string) ([]integrationstypes.CloudIntegration, error)
get(ctx context.Context, orgId string, provider string, id string) (*types.CloudIntegration, *model.ApiError)
Get(ctx context.Context, orgId string, provider string, id string) (*integrationstypes.CloudIntegration, error)
GetConnectedCloudAccount(ctx context.Context, orgId string, provider string, accountID string) (*integrationstypes.CloudIntegration, error)
getConnectedCloudAccount(ctx context.Context, orgId string, provider string, accountID string) (*types.CloudIntegration, *model.ApiError)
// Insert an account or update it by (cloudProvider, id)
// for specified non-empty fields
Upsert(
upsert(
ctx context.Context,
orgId string,
provider string,
id *string,
config []byte,
config *types.AccountConfig,
accountId *string,
agentReport *integrationstypes.AgentReport,
agentReport *types.AgentReport,
removedAt *time.Time,
) (*integrationstypes.CloudIntegration, error)
) (*types.CloudIntegration, *model.ApiError)
}
func NewCloudProviderAccountsRepository(store sqlstore.SQLStore) CloudProviderAccountsRepository {
return &cloudProviderAccountsSQLRepository{store: store}
func newCloudProviderAccountsRepository(store sqlstore.SQLStore) (
*cloudProviderAccountsSQLRepository, error,
) {
return &cloudProviderAccountsSQLRepository{
store: store,
}, nil
}
type cloudProviderAccountsSQLRepository struct {
store sqlstore.SQLStore
}
func (r *cloudProviderAccountsSQLRepository) ListConnected(
func (r *cloudProviderAccountsSQLRepository) listConnected(
ctx context.Context, orgId string, cloudProvider string,
) ([]integrationstypes.CloudIntegration, error) {
accounts := []integrationstypes.CloudIntegration{}
) ([]types.CloudIntegration, *model.ApiError) {
accounts := []types.CloudIntegration{}
err := r.store.BunDB().NewSelect().
Model(&accounts).
@@ -63,16 +62,18 @@ func (r *cloudProviderAccountsSQLRepository) ListConnected(
Scan(ctx)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "could not query connected cloud accounts")
return nil, model.InternalError(fmt.Errorf(
"could not query connected cloud accounts: %w", err,
))
}
return accounts, nil
}
func (r *cloudProviderAccountsSQLRepository) Get(
func (r *cloudProviderAccountsSQLRepository) get(
ctx context.Context, orgId string, provider string, id string,
) (*integrationstypes.CloudIntegration, error) {
var result integrationstypes.CloudIntegration
) (*types.CloudIntegration, *model.ApiError) {
var result types.CloudIntegration
err := r.store.BunDB().NewSelect().
Model(&result).
@@ -81,25 +82,23 @@ func (r *cloudProviderAccountsSQLRepository) Get(
Where("id = ?", id).
Scan(ctx)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return nil, errors.WrapNotFoundf(
err,
CodeCloudIntegrationAccountNotFound,
"couldn't find account with Id %s", id,
)
}
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query cloud provider account")
if err == sql.ErrNoRows {
return nil, model.NotFoundError(fmt.Errorf(
"couldn't find account with Id %s", id,
))
} else if err != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't query cloud provider accounts: %w", err,
))
}
return &result, nil
}
func (r *cloudProviderAccountsSQLRepository) GetConnectedCloudAccount(
func (r *cloudProviderAccountsSQLRepository) getConnectedCloudAccount(
ctx context.Context, orgId string, provider string, accountId string,
) (*integrationstypes.CloudIntegration, error) {
var result integrationstypes.CloudIntegration
) (*types.CloudIntegration, *model.ApiError) {
var result types.CloudIntegration
err := r.store.BunDB().NewSelect().
Model(&result).
@@ -110,25 +109,29 @@ func (r *cloudProviderAccountsSQLRepository) GetConnectedCloudAccount(
Where("removed_at is NULL").
Scan(ctx)
if errors.Is(err, sql.ErrNoRows) {
return nil, errors.WrapNotFoundf(err, CodeCloudIntegrationAccountNotFound, "couldn't find connected cloud account %s", accountId)
if err == sql.ErrNoRows {
return nil, model.NotFoundError(fmt.Errorf(
"couldn't find connected cloud account %s", accountId,
))
} else if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query cloud provider account")
return nil, model.InternalError(fmt.Errorf(
"couldn't query cloud provider accounts: %w", err,
))
}
return &result, nil
}
func (r *cloudProviderAccountsSQLRepository) Upsert(
func (r *cloudProviderAccountsSQLRepository) upsert(
ctx context.Context,
orgId string,
provider string,
id *string,
config []byte,
config *types.AccountConfig,
accountId *string,
agentReport *integrationstypes.AgentReport,
agentReport *types.AgentReport,
removedAt *time.Time,
) (*integrationstypes.CloudIntegration, error) {
) (*types.CloudIntegration, *model.ApiError) {
// Insert
if id == nil {
temp := valuer.GenerateUUID().StringValue()
@@ -178,7 +181,7 @@ func (r *cloudProviderAccountsSQLRepository) Upsert(
)
}
integration := integrationstypes.CloudIntegration{
integration := types.CloudIntegration{
OrgID: orgId,
Provider: provider,
Identifiable: types.Identifiable{ID: valuer.MustNewUUID(*id)},
@@ -192,18 +195,22 @@ func (r *cloudProviderAccountsSQLRepository) Upsert(
RemovedAt: removedAt,
}
_, err := r.store.BunDB().NewInsert().
_, dbErr := r.store.BunDB().NewInsert().
Model(&integration).
On(onConflictClause).
Exec(ctx)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't upsert cloud integration account")
if dbErr != nil {
return nil, model.InternalError(fmt.Errorf(
"could not upsert cloud account record: %w", dbErr,
))
}
upsertedAccount, err := r.Get(ctx, orgId, provider, *id)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't get upserted cloud integration account")
upsertedAccount, apiErr := r.get(ctx, orgId, provider, *id)
if apiErr != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't fetch upserted account by id: %w", apiErr.ToError(),
))
}
return upsertedAccount, nil


@@ -1,4 +1,4 @@
package integrationstypes
package cloudintegrations
import (
"github.com/SigNoz/signoz/pkg/errors"
@@ -41,62 +41,3 @@ var ValidAWSRegions = map[string]bool{
"us-west-1": true, // US West (N. California).
"us-west-2": true, // US West (Oregon).
}
var ValidAzureRegions = map[string]bool{
"australiacentral": true,
"australiacentral2": true,
"australiaeast": true,
"australiasoutheast": true,
"austriaeast": true,
"belgiumcentral": true,
"brazilsouth": true,
"brazilsoutheast": true,
"canadacentral": true,
"canadaeast": true,
"centralindia": true,
"centralus": true,
"chilecentral": true,
"denmarkeast": true,
"eastasia": true,
"eastus": true,
"eastus2": true,
"francecentral": true,
"francesouth": true,
"germanynorth": true,
"germanywestcentral": true,
"indonesiacentral": true,
"israelcentral": true,
"italynorth": true,
"japaneast": true,
"japanwest": true,
"koreacentral": true,
"koreasouth": true,
"malaysiawest": true,
"mexicocentral": true,
"newzealandnorth": true,
"northcentralus": true,
"northeurope": true,
"norwayeast": true,
"norwaywest": true,
"polandcentral": true,
"qatarcentral": true,
"southafricanorth": true,
"southafricawest": true,
"southcentralus": true,
"southindia": true,
"southeastasia": true,
"spaincentral": true,
"swedencentral": true,
"switzerlandnorth": true,
"switzerlandwest": true,
"uaecentral": true,
"uaenorth": true,
"uksouth": true,
"ukwest": true,
"westcentralus": true,
"westeurope": true,
"westindia": true,
"westus": true,
"westus2": true,
"westus3": true,
}

File diff suppressed because it is too large

View File

@@ -1,796 +0,0 @@
package implawsprovider
import (
"context"
"fmt"
"log/slog"
"net/url"
"slices"
"sort"
"sync"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
integrationstore "github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/store"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"golang.org/x/exp/maps"
)
var (
CodeInvalidAWSRegion = errors.MustNewCode("invalid_aws_region")
CodeDashboardNotFound = errors.MustNewCode("dashboard_not_found")
)
type awsProvider struct {
logger *slog.Logger
querier querier.Querier
accountsRepo integrationstore.CloudProviderAccountsRepository
serviceConfigRepo integrationstore.ServiceConfigDatabase
awsServiceDefinitions *services.AWSServicesProvider
}
func NewAWSCloudProvider(
logger *slog.Logger,
accountsRepo integrationstore.CloudProviderAccountsRepository,
serviceConfigRepo integrationstore.ServiceConfigDatabase,
querier querier.Querier,
) integrationstypes.CloudProvider {
awsServiceDefinitions, err := services.NewAWSCloudProviderServices()
if err != nil {
panic("failed to initialize AWS service definitions: " + err.Error())
}
return &awsProvider{
logger: logger,
querier: querier,
accountsRepo: accountsRepo,
serviceConfigRepo: serviceConfigRepo,
awsServiceDefinitions: awsServiceDefinitions,
}
}
func (a *awsProvider) GetAccountStatus(ctx context.Context, orgID, accountID string) (*integrationstypes.GettableAccountStatus, error) {
accountRecord, err := a.accountsRepo.Get(ctx, orgID, a.GetName().String(), accountID)
if err != nil {
return nil, err
}
return &integrationstypes.GettableAccountStatus{
Id: accountRecord.ID.String(),
CloudAccountId: accountRecord.AccountID,
Status: accountRecord.Status(),
}, nil
}
func (a *awsProvider) ListConnectedAccounts(ctx context.Context, orgID string) (*integrationstypes.GettableConnectedAccountsList, error) {
accountRecords, err := a.accountsRepo.ListConnected(ctx, orgID, a.GetName().String())
if err != nil {
return nil, err
}
connectedAccounts := make([]*integrationstypes.Account, 0, len(accountRecords))
for _, r := range accountRecords {
connectedAccounts = append(connectedAccounts, r.Account(a.GetName()))
}
return &integrationstypes.GettableConnectedAccountsList{
Accounts: connectedAccounts,
}, nil
}
func (a *awsProvider) AgentCheckIn(ctx context.Context, req *integrationstypes.PostableAgentCheckInPayload) (any, error) {
// agent can't check in unless the account is already created
existingAccount, err := a.accountsRepo.Get(ctx, req.OrgID, a.GetName().String(), req.ID)
if err != nil {
return nil, err
}
if existingAccount != nil && existingAccount.AccountID != nil && *existingAccount.AccountID != req.AccountID {
return nil, model.BadRequest(fmt.Errorf(
"can't check in with new %s account id %s for account %s with existing %s id %s",
a.GetName().String(), req.AccountID, existingAccount.ID.StringValue(), a.GetName().String(),
*existingAccount.AccountID,
))
}
existingAccount, err = a.accountsRepo.GetConnectedCloudAccount(ctx, req.OrgID, a.GetName().String(), req.AccountID)
if existingAccount != nil && existingAccount.ID.StringValue() != req.ID {
return nil, model.BadRequest(fmt.Errorf(
"can't check in to %s account %s with id %s. already connected with id %s",
a.GetName().String(), req.AccountID, req.ID, existingAccount.ID.StringValue(),
))
}
agentReport := integrationstypes.AgentReport{
TimestampMillis: time.Now().UnixMilli(),
Data: req.Data,
}
account, err := a.accountsRepo.Upsert(
ctx, req.OrgID, a.GetName().String(), &req.ID, nil, &req.AccountID, &agentReport, nil,
)
if err != nil {
return nil, err
}
agentConfig, err := a.getAWSAgentConfig(ctx, account)
if err != nil {
return nil, err
}
return &integrationstypes.GettableAWSAgentCheckIn{
AccountId: account.ID.StringValue(),
CloudAccountId: *account.AccountID,
RemovedAt: account.RemovedAt,
IntegrationConfig: *agentConfig,
}, nil
}
func (a *awsProvider) getAWSAgentConfig(ctx context.Context, account *integrationstypes.CloudIntegration) (*integrationstypes.AWSAgentIntegrationConfig, error) {
// prepare and return integration config to be consumed by agent
agentConfig := &integrationstypes.AWSAgentIntegrationConfig{
EnabledRegions: []string{},
TelemetryCollectionStrategy: &integrationstypes.AWSCollectionStrategy{
Provider: a.GetName(),
AWSMetrics: &integrationstypes.AWSMetricsStrategy{},
AWSLogs: &integrationstypes.AWSLogsStrategy{},
S3Buckets: map[string][]string{},
},
}
accountConfig := new(integrationstypes.AWSAccountConfig)
err := accountConfig.Unmarshal(account.Config)
if err != nil {
return nil, err
}
if accountConfig != nil && accountConfig.EnabledRegions != nil {
agentConfig.EnabledRegions = accountConfig.EnabledRegions
}
svcConfigs, err := a.serviceConfigRepo.GetAllForAccount(
ctx, account.OrgID, account.ID.StringValue(),
)
if err != nil {
return nil, err
}
// accumulate config in a fixed order to ensure the same config is generated across runs
configuredServices := maps.Keys(svcConfigs)
slices.Sort(configuredServices)
for _, svcType := range configuredServices {
definition, err := a.awsServiceDefinitions.GetServiceDefinition(ctx, svcType)
if err != nil {
continue
}
config := svcConfigs[svcType]
serviceConfig := new(integrationstypes.AWSCloudServiceConfig)
err = serviceConfig.Unmarshal(config)
if err != nil {
continue
}
if serviceConfig.Logs != nil && serviceConfig.Logs.Enabled {
if svcType == integrationstypes.S3Sync {
// S3 bucket sync; no CloudWatch logs are appended for this service type.
// The definition is still populated with a custom CloudWatch log group, which helps in calculating the logs connection status.
agentConfig.TelemetryCollectionStrategy.S3Buckets = serviceConfig.Logs.S3Buckets
} else if definition.Strategy.AWSLogs != nil { // services that include a logs subscription
agentConfig.TelemetryCollectionStrategy.AWSLogs.Subscriptions = append(
agentConfig.TelemetryCollectionStrategy.AWSLogs.Subscriptions,
definition.Strategy.AWSLogs.Subscriptions...,
)
}
}
if serviceConfig.Metrics != nil && serviceConfig.Metrics.Enabled && definition.Strategy.AWSMetrics != nil {
agentConfig.TelemetryCollectionStrategy.AWSMetrics.StreamFilters = append(
agentConfig.TelemetryCollectionStrategy.AWSMetrics.StreamFilters,
definition.Strategy.AWSMetrics.StreamFilters...,
)
}
}
return agentConfig, nil
}
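The loop above sorts the configured service types before iterating so that every agent check-in produces the same config. A small self-contained sketch of the same idiom, using placeholder data and the same slices and golang.org/x/exp/maps helpers:

package main

import (
    "fmt"
    "slices"

    "golang.org/x/exp/maps"
)

func main() {
    // Placeholder per-service configs keyed by service type.
    svcConfigs := map[string]string{"rds": "{...}", "ec2": "{...}", "lambda": "{...}"}

    // Map iteration order is randomized in Go, so sort the keys first;
    // every run then walks the services in the same order.
    serviceTypes := maps.Keys(svcConfigs)
    slices.Sort(serviceTypes)
    for _, svcType := range serviceTypes {
        fmt.Println(svcType, svcConfigs[svcType])
    }
}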
func (a *awsProvider) GetName() integrationstypes.CloudProviderType {
return integrationstypes.CloudProviderAWS
}
func (a *awsProvider) ListServices(ctx context.Context, orgID string, cloudAccountID *string) (any, error) {
svcConfigs := make(map[string]*integrationstypes.AWSCloudServiceConfig)
if cloudAccountID != nil {
activeAccount, err := a.accountsRepo.GetConnectedCloudAccount(ctx, orgID, a.GetName().String(), *cloudAccountID)
if err != nil {
return nil, err
}
serviceConfigs, err := a.serviceConfigRepo.GetAllForAccount(ctx, orgID, activeAccount.ID.String())
if err != nil {
return nil, err
}
for svcType, config := range serviceConfigs {
serviceConfig := new(integrationstypes.AWSCloudServiceConfig)
err = serviceConfig.Unmarshal(config)
if err != nil {
return nil, err
}
svcConfigs[svcType] = serviceConfig
}
}
summaries := make([]integrationstypes.AWSServiceSummary, 0)
definitions, err := a.awsServiceDefinitions.ListServiceDefinitions(ctx)
if err != nil {
return nil, model.InternalError(fmt.Errorf("couldn't list aws service definitions: %w", err))
}
for _, def := range definitions {
summary := integrationstypes.AWSServiceSummary{
DefinitionMetadata: def.DefinitionMetadata,
Config: nil,
}
summary.Config = svcConfigs[summary.Id]
summaries = append(summaries, summary)
}
sort.Slice(summaries, func(i, j int) bool {
return summaries[i].DefinitionMetadata.Title < summaries[j].DefinitionMetadata.Title
})
return &integrationstypes.GettableAWSServices{
Services: summaries,
}, nil
}
func (a *awsProvider) GetServiceDetails(ctx context.Context, req *integrationstypes.GetServiceDetailsReq) (any, error) {
details := new(integrationstypes.GettableAWSServiceDetails)
awsDefinition, err := a.awsServiceDefinitions.GetServiceDefinition(ctx, req.ServiceId)
if err != nil {
return nil, model.InternalError(fmt.Errorf("couldn't get aws service definition: %w", err))
}
details.AWSServiceDefinition = *awsDefinition
details.Strategy.Provider = a.GetName()
if req.CloudAccountID == nil {
return details, nil
}
config, err := a.getServiceConfig(ctx, &details.AWSServiceDefinition, req.OrgID, a.GetName().String(), req.ServiceId, *req.CloudAccountID)
if err != nil {
return nil, err
}
if config == nil {
return details, nil
}
details.Config = config
connectionStatus, err := a.getServiceConnectionStatus(
ctx,
*req.CloudAccountID,
req.OrgID,
&details.AWSServiceDefinition,
config,
)
if err != nil {
return nil, err
}
details.ConnectionStatus = connectionStatus
return details, nil
}
func (a *awsProvider) getServiceConnectionStatus(
ctx context.Context,
cloudAccountID string,
orgID string,
def *integrationstypes.AWSServiceDefinition,
serviceConfig *integrationstypes.AWSCloudServiceConfig,
) (*integrationstypes.ServiceConnectionStatus, error) {
if def.Strategy == nil {
return nil, nil
}
resp := new(integrationstypes.ServiceConnectionStatus)
wg := sync.WaitGroup{}
if def.Strategy.AWSMetrics != nil && serviceConfig.Metrics != nil && serviceConfig.Metrics.Enabled {
wg.Add(1)
go func() {
defer func() {
if r := recover(); r != nil {
a.logger.ErrorContext(
ctx, "panic while getting service metrics connection status",
"error", r,
"service", def.DefinitionMetadata.Id,
)
}
}()
defer wg.Done()
status, _ := a.getServiceMetricsConnectionStatus(ctx, cloudAccountID, orgID, def)
resp.Metrics = status
}()
}
if def.Strategy.AWSLogs != nil && serviceConfig.Logs != nil && serviceConfig.Logs.Enabled {
wg.Add(1)
go func() {
defer func() {
if r := recover(); r != nil {
a.logger.ErrorContext(
ctx, "panic while getting service logs connection status",
"error", r,
"service", def.DefinitionMetadata.Id,
)
}
}()
defer wg.Done()
status, _ := a.getServiceLogsConnectionStatus(ctx, cloudAccountID, orgID, def)
resp.Logs = status
}()
}
wg.Wait()
return resp, nil
}
func (a *awsProvider) getServiceMetricsConnectionStatus(
ctx context.Context,
cloudAccountID string,
orgID string,
def *integrationstypes.AWSServiceDefinition,
) ([]*integrationstypes.SignalConnectionStatus, error) {
if def.Strategy == nil ||
len(def.Strategy.AWSMetrics.StreamFilters) < 1 ||
len(def.DataCollected.Metrics) < 1 {
return nil, nil
}
statusResp := make([]*integrationstypes.SignalConnectionStatus, 0)
for _, category := range def.IngestionStatusCheck.Metrics {
queries := make([]qbtypes.QueryEnvelope, 0)
for _, check := range category.Checks {
filterExpression := fmt.Sprintf(`cloud.provider="aws" AND cloud.account.id="%s"`, cloudAccountID)
f := ""
for _, attribute := range check.Attributes {
f = fmt.Sprintf("%s %s", attribute.Name, attribute.Operator)
if attribute.Value != "" {
f = fmt.Sprintf("%s '%s'", f, attribute.Value)
}
filterExpression = fmt.Sprintf("%s AND %s", filterExpression, f)
}
queries = append(queries, qbtypes.QueryEnvelope{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: valuer.GenerateUUID().String(),
Signal: telemetrytypes.SignalMetrics,
Aggregations: []qbtypes.MetricAggregation{{
MetricName: check.Key,
TimeAggregation: metrictypes.TimeAggregationAvg,
SpaceAggregation: metrictypes.SpaceAggregationAvg,
}},
Filter: &qbtypes.Filter{
Expression: filterExpression,
},
},
})
}
resp, err := a.querier.QueryRange(ctx, valuer.MustNewUUID(orgID), &qbtypes.QueryRangeRequest{
SchemaVersion: "v5",
Start: uint64(time.Now().Add(-time.Hour).UnixMilli()),
End: uint64(time.Now().UnixMilli()),
RequestType: qbtypes.RequestTypeScalar,
CompositeQuery: qbtypes.CompositeQuery{
Queries: queries,
},
})
if err != nil {
a.logger.DebugContext(ctx,
"error querying for service metrics connection status",
"error", err,
"service", def.DefinitionMetadata.Id,
)
continue
}
if resp == nil || len(resp.Data.Results) < 1 {
continue
}
queryResponse, ok := resp.Data.Results[0].(*qbtypes.TimeSeriesData)
if !ok {
continue
}
if queryResponse == nil ||
len(queryResponse.Aggregations) < 1 ||
len(queryResponse.Aggregations[0].Series) < 1 ||
len(queryResponse.Aggregations[0].Series[0].Values) < 1 {
continue
}
statusResp = append(statusResp, &integrationstypes.SignalConnectionStatus{
CategoryID: category.Category,
CategoryDisplayName: category.DisplayName,
LastReceivedTsMillis: queryResponse.Aggregations[0].Series[0].Values[0].Timestamp,
LastReceivedFrom: "signoz-aws-integration",
})
}
return statusResp, nil
}
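The connection-status checks above build a filter expression by appending one clause per check attribute to a base account filter. A standalone sketch, using a placeholder account id and the ApiName EXISTS attribute from the API Gateway definition, of the expression this produces:

package main

import "fmt"

type checkAttribute struct {
    Name     string
    Operator string
    Value    string
}

// buildFilterExpression mirrors the expression assembly in getServiceMetricsConnectionStatus.
func buildFilterExpression(cloudAccountID string, attrs []checkAttribute) string {
    expr := fmt.Sprintf(`cloud.provider="aws" AND cloud.account.id="%s"`, cloudAccountID)
    for _, a := range attrs {
        f := fmt.Sprintf("%s %s", a.Name, a.Operator)
        if a.Value != "" {
            f = fmt.Sprintf("%s '%s'", f, a.Value)
        }
        expr = fmt.Sprintf("%s AND %s", expr, f)
    }
    return expr
}

func main() {
    fmt.Println(buildFilterExpression("123456789012", []checkAttribute{
        {Name: "ApiName", Operator: "EXISTS"},
    }))
    // cloud.provider="aws" AND cloud.account.id="123456789012" AND ApiName EXISTS
}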
func (a *awsProvider) getServiceLogsConnectionStatus(
ctx context.Context,
cloudAccountID string,
orgID string,
def *integrationstypes.AWSServiceDefinition,
) ([]*integrationstypes.SignalConnectionStatus, error) {
if def.Strategy == nil ||
len(def.Strategy.AWSLogs.Subscriptions) < 1 ||
len(def.DataCollected.Logs) < 1 {
return nil, nil
}
statusResp := make([]*integrationstypes.SignalConnectionStatus, 0)
for _, category := range def.IngestionStatusCheck.Logs {
queries := make([]qbtypes.QueryEnvelope, 0)
for _, check := range category.Checks {
filterExpression := fmt.Sprintf(`cloud.account.id="%s"`, cloudAccountID)
f := ""
for _, attribute := range check.Attributes {
f = fmt.Sprintf("%s %s", attribute.Name, attribute.Operator)
if attribute.Value != "" {
f = fmt.Sprintf("%s '%s'", f, attribute.Value)
}
filterExpression = fmt.Sprintf("%s AND %s", filterExpression, f)
}
queries = append(queries, qbtypes.QueryEnvelope{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Name: valuer.GenerateUUID().String(),
Signal: telemetrytypes.SignalLogs,
Aggregations: []qbtypes.LogAggregation{{
Expression: "count()",
}},
Filter: &qbtypes.Filter{
Expression: filterExpression,
},
Limit: 10,
Offset: 0,
},
})
}
resp, err := a.querier.QueryRange(ctx, valuer.MustNewUUID(orgID), &qbtypes.QueryRangeRequest{
SchemaVersion: "v1",
Start: uint64(time.Now().Add(-time.Hour * 1).UnixMilli()),
End: uint64(time.Now().UnixMilli()),
RequestType: qbtypes.RequestTypeTimeSeries,
CompositeQuery: qbtypes.CompositeQuery{
Queries: queries,
},
})
if err != nil {
a.logger.DebugContext(ctx,
"error querying for service logs connection status",
"error", err,
"service", def.DefinitionMetadata.Id,
)
continue
}
if resp == nil || len(resp.Data.Results) < 1 {
continue
}
queryResponse, ok := resp.Data.Results[0].(*qbtypes.TimeSeriesData)
if !ok {
continue
}
if queryResponse == nil ||
len(queryResponse.Aggregations) < 1 ||
len(queryResponse.Aggregations[0].Series) < 1 ||
len(queryResponse.Aggregations[0].Series[0].Values) < 1 {
continue
}
statusResp = append(statusResp, &integrationstypes.SignalConnectionStatus{
CategoryID: category.Category,
CategoryDisplayName: category.DisplayName,
LastReceivedTsMillis: queryResponse.Aggregations[0].Series[0].Values[0].Timestamp,
LastReceivedFrom: "signoz-aws-integration",
})
}
return statusResp, nil
}
func (a *awsProvider) getServiceConfig(ctx context.Context,
def *integrationstypes.AWSServiceDefinition, orgID, cloudProvider, serviceId, cloudAccountId string,
) (*integrationstypes.AWSCloudServiceConfig, error) {
activeAccount, err := a.accountsRepo.GetConnectedCloudAccount(ctx, orgID, cloudProvider, cloudAccountId)
if err != nil {
return nil, err
}
config, err := a.serviceConfigRepo.Get(ctx, orgID, activeAccount.ID.StringValue(), serviceId)
if err != nil {
if errors.Ast(err, errors.TypeNotFound) {
return nil, nil
}
return nil, err
}
serviceConfig := new(integrationstypes.AWSCloudServiceConfig)
err = serviceConfig.Unmarshal(config)
if err != nil {
return nil, err
}
if config != nil && serviceConfig.Metrics != nil && serviceConfig.Metrics.Enabled {
def.PopulateDashboardURLs(serviceId)
}
return serviceConfig, nil
}
func (a *awsProvider) GetAvailableDashboards(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error) {
accountRecords, err := a.accountsRepo.ListConnected(ctx, orgID.StringValue(), a.GetName().String())
if err != nil {
return nil, err
}
// for now service dashboards are only available when metrics are enabled.
servicesWithAvailableMetrics := map[string]*time.Time{}
for _, ar := range accountRecords {
if ar.AccountID != nil {
configsBySvcId, err := a.serviceConfigRepo.GetAllForAccount(ctx, orgID.StringValue(), ar.ID.StringValue())
if err != nil {
return nil, err
}
for svcId, config := range configsBySvcId {
serviceConfig := new(integrationstypes.AWSCloudServiceConfig)
err = serviceConfig.Unmarshal(config)
if err != nil {
return nil, err
}
if serviceConfig.Metrics != nil && serviceConfig.Metrics.Enabled {
servicesWithAvailableMetrics[svcId] = &ar.CreatedAt
}
}
}
}
svcDashboards := make([]*dashboardtypes.Dashboard, 0)
allServices, err := a.awsServiceDefinitions.ListServiceDefinitions(ctx)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to list aws service definitions")
}
for _, svc := range allServices {
serviceDashboardsCreatedAt, ok := servicesWithAvailableMetrics[svc.Id]
if ok {
svcDashboards = integrationstypes.GetDashboardsFromAssets(svc.Id, a.GetName(), serviceDashboardsCreatedAt, svc.Assets)
servicesWithAvailableMetrics[svc.Id] = nil
}
}
return svcDashboards, nil
}
func (a *awsProvider) GetDashboard(ctx context.Context, req *integrationstypes.GettableDashboard) (*dashboardtypes.Dashboard, error) {
allDashboards, err := a.GetAvailableDashboards(ctx, req.OrgID)
if err != nil {
return nil, err
}
for _, d := range allDashboards {
if d.ID == req.ID {
return d, nil
}
}
return nil, errors.NewNotFoundf(CodeDashboardNotFound, "dashboard with id %s not found", req.ID)
}
func (a *awsProvider) GenerateConnectionArtifact(ctx context.Context, req *integrationstypes.PostableConnectionArtifact) (any, error) {
connection := new(integrationstypes.PostableAWSConnectionUrl)
err := connection.Unmarshal(req.Data)
if err != nil {
return nil, err
}
if connection.AccountConfig != nil {
for _, region := range connection.AccountConfig.EnabledRegions {
if integrationstypes.ValidAWSRegions[region] {
continue
}
return nil, errors.NewInvalidInputf(CodeInvalidAWSRegion, "invalid aws region: %s", region)
}
}
config, err := connection.AccountConfig.Marshal()
if err != nil {
return nil, err
}
account, err := a.accountsRepo.Upsert(
ctx, req.OrgID, integrationstypes.CloudProviderAWS.String(), nil, config,
nil, nil, nil,
)
if err != nil {
return nil, err
}
agentVersion := "v0.0.8"
if connection.AgentConfig.Version != "" {
agentVersion = connection.AgentConfig.Version
}
baseURL := fmt.Sprintf("https://%s.console.aws.amazon.com/cloudformation/home",
connection.AgentConfig.Region)
u, _ := url.Parse(baseURL)
q := u.Query()
q.Set("region", connection.AgentConfig.Region)
u.Fragment = "/stacks/quickcreate"
u.RawQuery = q.Encode()
q = u.Query()
q.Set("stackName", "signoz-integration")
q.Set("templateURL", fmt.Sprintf("https://signoz-integrations.s3.us-east-1.amazonaws.com/aws-quickcreate-template-%s.json", agentVersion))
q.Set("param_SigNozIntegrationAgentVersion", agentVersion)
q.Set("param_SigNozApiUrl", connection.AgentConfig.SigNozAPIUrl)
q.Set("param_SigNozApiKey", connection.AgentConfig.SigNozAPIKey)
q.Set("param_SigNozAccountId", account.ID.StringValue())
q.Set("param_IngestionUrl", connection.AgentConfig.IngestionUrl)
q.Set("param_IngestionKey", connection.AgentConfig.IngestionKey)
return &integrationstypes.GettableAWSConnectionUrl{
AccountId: account.ID.StringValue(),
ConnectionUrl: u.String() + "?&" + q.Encode(), // this format is required by AWS
}, nil
}
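The quick-create URL above carries a second query string after the #/stacks/quickcreate fragment, which is the shape the CloudFormation console expects; that is why the handler concatenates u.String(), "?&" and q.Encode() by hand. A simplified standalone sketch with placeholder region, version and parameters:

package main

import (
    "fmt"
    "net/url"
)

func main() {
    // Placeholder values; the handler above fills these from the request.
    region := "us-east-1"
    agentVersion := "v0.0.8"

    u, _ := url.Parse(fmt.Sprintf("https://%s.console.aws.amazon.com/cloudformation/home", region))

    // Query string before the fragment selects the console region.
    q := u.Query()
    q.Set("region", region)
    u.RawQuery = q.Encode()
    u.Fragment = "/stacks/quickcreate"

    // The quick-create route reads its own parameters after the fragment,
    // so they are appended manually instead of via u.RawQuery.
    p := url.Values{}
    p.Set("stackName", "signoz-integration")
    p.Set("templateURL", fmt.Sprintf("https://signoz-integrations.s3.us-east-1.amazonaws.com/aws-quickcreate-template-%s.json", agentVersion))
    p.Set("param_SigNozIntegrationAgentVersion", agentVersion)

    // Prints https://us-east-1.console.aws.amazon.com/cloudformation/home?region=us-east-1#/stacks/quickcreate?&param_...&stackName=...&templateURL=...
    fmt.Println(u.String() + "?&" + p.Encode())
}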
func (a *awsProvider) UpdateServiceConfig(ctx context.Context, req *integrationstypes.PatchableServiceConfig) (any, error) {
definition, err := a.awsServiceDefinitions.GetServiceDefinition(ctx, req.ServiceId)
if err != nil {
return nil, model.InternalError(fmt.Errorf("couldn't get aws service definition: %w", err))
}
serviceConfig := new(integrationstypes.PatchableAWSCloudServiceConfig)
err = serviceConfig.Unmarshal(req.Config)
if err != nil {
return nil, err
}
if err = serviceConfig.Config.Validate(definition); err != nil {
return nil, err
}
// can only update config for a connected cloud account id
_, err = a.accountsRepo.GetConnectedCloudAccount(
ctx, req.OrgID, a.GetName().String(), serviceConfig.CloudAccountId,
)
if err != nil {
return nil, err
}
serviceConfigBytes, err := serviceConfig.Config.Marshal()
if err != nil {
return nil, err
}
updatedConfig, err := a.serviceConfigRepo.Upsert(
ctx, req.OrgID, a.GetName().String(), serviceConfig.CloudAccountId, req.ServiceId, serviceConfigBytes,
)
if err != nil {
return nil, err
}
if err = serviceConfig.Unmarshal(updatedConfig); err != nil {
return nil, err
}
return &integrationstypes.PatchServiceConfigResponse{
ServiceId: req.ServiceId,
Config: serviceConfig,
}, nil
}
func (a *awsProvider) UpdateAccountConfig(ctx context.Context, req *integrationstypes.PatchableAccountConfig) (any, error) {
config := new(integrationstypes.PatchableAWSAccountConfig)
err := config.Unmarshal(req.Data)
if err != nil {
return nil, err
}
if config.Config == nil {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "account config can't be null")
}
for _, region := range config.Config.EnabledRegions {
if integrationstypes.ValidAWSRegions[region] {
continue
}
return nil, errors.NewInvalidInputf(CodeInvalidAWSRegion, "invalid aws region: %s", region)
}
configBytes, err := config.Config.Marshal()
if err != nil {
return nil, err
}
// account must exist to update config, but it doesn't need to be connected
_, err = a.accountsRepo.Get(ctx, req.OrgID, a.GetName().String(), req.AccountId)
if err != nil {
return nil, err
}
accountRecord, err := a.accountsRepo.Upsert(
ctx, req.OrgID, a.GetName().String(), &req.AccountId, configBytes, nil, nil, nil,
)
if err != nil {
return nil, err
}
return accountRecord.Account(a.GetName()), nil
}
func (a *awsProvider) DisconnectAccount(ctx context.Context, orgID, accountID string) (*integrationstypes.CloudIntegration, error) {
account, err := a.accountsRepo.Get(ctx, orgID, a.GetName().String(), accountID)
if err != nil {
return nil, err
}
tsNow := time.Now()
account, err = a.accountsRepo.Upsert(
ctx, orgID, a.GetName().String(), &accountID, nil, nil, nil, &tsNow,
)
if err != nil {
return nil, err
}
return account, nil
}

View File

@@ -1,563 +0,0 @@
package implazureprovider
import (
"context"
"fmt"
"log/slog"
"slices"
"sort"
"strings"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/store"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/valuer"
"golang.org/x/exp/maps"
)
var (
CodeInvalidAzureRegion = errors.MustNewCode("invalid_azure_region")
CodeDashboardNotFound = errors.MustNewCode("dashboard_not_found")
)
type azureProvider struct {
logger *slog.Logger
accountsRepo store.CloudProviderAccountsRepository
serviceConfigRepo store.ServiceConfigDatabase
azureServiceDefinitions *services.AzureServicesProvider
querier querier.Querier
}
func NewAzureCloudProvider(
logger *slog.Logger,
accountsRepo store.CloudProviderAccountsRepository,
serviceConfigRepo store.ServiceConfigDatabase,
querier querier.Querier,
) integrationstypes.CloudProvider {
azureServiceDefinitions, err := services.NewAzureCloudProviderServices()
if err != nil {
panic("failed to initialize Azure service definitions: " + err.Error())
}
return &azureProvider{
logger: logger,
accountsRepo: accountsRepo,
serviceConfigRepo: serviceConfigRepo,
azureServiceDefinitions: azureServiceDefinitions,
querier: querier,
}
}
func (a *azureProvider) GetAccountStatus(ctx context.Context, orgID, accountID string) (*integrationstypes.GettableAccountStatus, error) {
account, err := a.accountsRepo.Get(ctx, orgID, a.GetName().String(), accountID)
if err != nil {
return nil, err
}
return &integrationstypes.GettableAccountStatus{
Id: account.ID.String(),
CloudAccountId: account.AccountID,
Status: account.Status(),
}, nil
}
func (a *azureProvider) ListConnectedAccounts(ctx context.Context, orgID string) (*integrationstypes.GettableConnectedAccountsList, error) {
accountRecords, err := a.accountsRepo.ListConnected(ctx, orgID, a.GetName().String())
if err != nil {
return nil, err
}
connectedAccounts := make([]*integrationstypes.Account, 0, len(accountRecords))
for _, r := range accountRecords {
connectedAccounts = append(connectedAccounts, r.Account(a.GetName()))
}
return &integrationstypes.GettableConnectedAccountsList{
Accounts: connectedAccounts,
}, nil
}
func (a *azureProvider) AgentCheckIn(ctx context.Context, req *integrationstypes.PostableAgentCheckInPayload) (any, error) {
existingAccount, err := a.accountsRepo.Get(ctx, req.OrgID, a.GetName().String(), req.ID)
if err != nil {
return nil, err
}
if existingAccount != nil && existingAccount.AccountID != nil && *existingAccount.AccountID != req.AccountID {
return nil, model.BadRequest(fmt.Errorf(
"can't check in with new %s account id %s for account %s with existing %s id %s",
a.GetName().String(), req.AccountID, existingAccount.ID.StringValue(), a.GetName().String(),
*existingAccount.AccountID,
))
}
existingAccount, err = a.accountsRepo.GetConnectedCloudAccount(ctx, req.OrgID, a.GetName().String(), req.AccountID)
if existingAccount != nil && existingAccount.ID.StringValue() != req.ID {
return nil, model.BadRequest(fmt.Errorf(
"can't check in to %s account %s with id %s. already connected with id %s",
a.GetName().String(), req.AccountID, req.ID, existingAccount.ID.StringValue(),
))
}
agentReport := integrationstypes.AgentReport{
TimestampMillis: time.Now().UnixMilli(),
Data: req.Data,
}
account, err := a.accountsRepo.Upsert(
ctx, req.OrgID, a.GetName().String(), &req.ID, nil, &req.AccountID, &agentReport, nil,
)
if err != nil {
return nil, err
}
agentConfig, err := a.getAzureAgentConfig(ctx, account)
if err != nil {
return nil, err
}
return &integrationstypes.GettableAzureAgentCheckIn{
AccountId: account.ID.StringValue(),
CloudAccountId: *account.AccountID,
RemovedAt: account.RemovedAt,
IntegrationConfig: *agentConfig,
}, nil
}
func (a *azureProvider) getAzureAgentConfig(ctx context.Context, account *integrationstypes.CloudIntegration) (*integrationstypes.AzureAgentIntegrationConfig, error) {
// prepare and return integration config to be consumed by agent
agentConfig := &integrationstypes.AzureAgentIntegrationConfig{
TelemetryCollectionStrategy: &integrationstypes.AzureCollectionStrategy{
Provider: a.GetName(),
AzureMetrics: make([]*integrationstypes.AzureMetricsStrategy, 0),
AzureLogs: make([]*integrationstypes.AzureLogsStrategy, 0),
},
}
accountConfig := new(integrationstypes.AzureAccountConfig)
err := accountConfig.Unmarshal(account.Config)
if err != nil {
return nil, err
}
if account.Config != nil {
agentConfig.DeploymentRegion = accountConfig.DeploymentRegion
agentConfig.EnabledResourceGroups = accountConfig.EnabledResourceGroups
}
svcConfigs, err := a.serviceConfigRepo.GetAllForAccount(
ctx, account.OrgID, account.ID.StringValue(),
)
if err != nil {
return nil, err
}
// accumulate config in a fixed order to ensure the same config is generated across runs
configuredServices := maps.Keys(svcConfigs)
slices.Sort(configuredServices)
metrics := make([]*integrationstypes.AzureMetricsStrategy, 0)
logs := make([]*integrationstypes.AzureLogsStrategy, 0)
for _, svcType := range configuredServices {
definition, err := a.azureServiceDefinitions.GetServiceDefinition(ctx, svcType)
if err != nil {
continue
}
config := svcConfigs[svcType]
serviceConfig := new(integrationstypes.AzureCloudServiceConfig)
err = serviceConfig.Unmarshal(config)
if err != nil {
continue
}
metricsStrategyMap := make(map[string]*integrationstypes.AzureMetricsStrategy)
logsStrategyMap := make(map[string]*integrationstypes.AzureLogsStrategy)
for _, metric := range definition.Strategy.AzureMetrics {
metricsStrategyMap[metric.Name] = metric
}
for _, log := range definition.Strategy.AzureLogs {
logsStrategyMap[log.Name] = log
}
if serviceConfig.Metrics != nil {
for _, metric := range serviceConfig.Metrics {
if metric.Enabled {
metrics = append(metrics, &integrationstypes.AzureMetricsStrategy{
CategoryType: metricsStrategyMap[metric.Name].CategoryType,
Name: metric.Name,
})
}
}
}
if serviceConfig.Logs != nil {
for _, log := range serviceConfig.Logs {
if log.Enabled {
logs = append(logs, &integrationstypes.AzureLogsStrategy{
CategoryType: logsStrategyMap[log.Name].CategoryType,
Name: log.Name,
})
}
}
}
}
agentConfig.TelemetryCollectionStrategy.AzureMetrics = metrics
agentConfig.TelemetryCollectionStrategy.AzureLogs = logs
return agentConfig, nil
}
func (a *azureProvider) GetName() integrationstypes.CloudProviderType {
return integrationstypes.CloudProviderAzure
}
func (a *azureProvider) ListServices(ctx context.Context, orgID string, cloudAccountID *string) (any, error) {
svcConfigs := make(map[string]*integrationstypes.AzureCloudServiceConfig)
if cloudAccountID != nil {
activeAccount, err := a.accountsRepo.GetConnectedCloudAccount(ctx, orgID, a.GetName().String(), *cloudAccountID)
if err != nil {
return nil, err
}
serviceConfigs, err := a.serviceConfigRepo.GetAllForAccount(ctx, orgID, activeAccount.ID.StringValue())
if err != nil {
return nil, err
}
for svcType, config := range serviceConfigs {
serviceConfig := new(integrationstypes.AzureCloudServiceConfig)
err = serviceConfig.Unmarshal(config)
if err != nil {
return nil, err
}
svcConfigs[svcType] = serviceConfig
}
}
summaries := make([]integrationstypes.AzureServiceSummary, 0)
definitions, err := a.azureServiceDefinitions.ListServiceDefinitions(ctx)
if err != nil {
return nil, model.InternalError(fmt.Errorf("couldn't list aws service definitions: %w", err))
}
for _, def := range definitions {
summary := integrationstypes.AzureServiceSummary{
DefinitionMetadata: def.DefinitionMetadata,
Config: nil,
}
summary.Config = svcConfigs[summary.Id]
summaries = append(summaries, summary)
}
sort.Slice(summaries, func(i, j int) bool {
return summaries[i].DefinitionMetadata.Title < summaries[j].DefinitionMetadata.Title
})
return &integrationstypes.GettableAzureServices{
Services: summaries,
}, nil
}
func (a *azureProvider) GetServiceDetails(ctx context.Context, req *integrationstypes.GetServiceDetailsReq) (any, error) {
details := new(integrationstypes.GettableAzureServiceDetails)
azureDefinition, err := a.azureServiceDefinitions.GetServiceDefinition(ctx, req.ServiceId)
if err != nil {
return nil, model.InternalError(fmt.Errorf("couldn't get aws service definition: %w", err))
}
details.AzureServiceDefinition = *azureDefinition
details.Strategy.Provider = a.GetName()
if req.CloudAccountID == nil {
return details, nil
}
config, err := a.getServiceConfig(ctx, &details.AzureServiceDefinition, req.OrgID, req.ServiceId, *req.CloudAccountID)
if err != nil {
return nil, err
}
if config == nil {
return details, nil
}
details.Config = config
// TODO: write logic for getting connection status
return details, nil
}
func (a *azureProvider) getServiceConfig(
ctx context.Context,
definition *integrationstypes.AzureServiceDefinition,
orgID string,
serviceId string,
cloudAccountId string,
) (*integrationstypes.AzureCloudServiceConfig, error) {
activeAccount, err := a.accountsRepo.GetConnectedCloudAccount(ctx, orgID, a.GetName().String(), cloudAccountId)
if err != nil {
return nil, err
}
configBytes, err := a.serviceConfigRepo.Get(ctx, orgID, activeAccount.ID.String(), serviceId)
if err != nil {
if errors.Ast(err, errors.TypeNotFound) {
return nil, nil
}
return nil, err
}
config := new(integrationstypes.AzureCloudServiceConfig)
err = config.Unmarshal(configBytes)
if err != nil {
return nil, err
}
for _, metric := range config.Metrics {
if metric.Enabled {
definition.PopulateDashboardURLs(serviceId)
break
}
}
return config, nil
}
func (a *azureProvider) GenerateConnectionArtifact(ctx context.Context, req *integrationstypes.PostableConnectionArtifact) (any, error) {
connection := new(integrationstypes.PostableAzureConnectionCommand)
err := connection.Unmarshal(req.Data)
if err != nil {
return nil, errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to unmarshal request data into Azure connection config")
}
// validate connection config
if connection.AccountConfig != nil {
if !integrationstypes.ValidAzureRegions[connection.AccountConfig.DeploymentRegion] {
return nil, errors.NewInvalidInputf(CodeInvalidAzureRegion, "invalid azure region: %s",
connection.AccountConfig.DeploymentRegion,
)
}
}
config, err := connection.AccountConfig.Marshal()
if err != nil {
return nil, err
}
account, err := a.accountsRepo.Upsert(
ctx, req.OrgID, a.GetName().String(), nil, config,
nil, nil, nil,
)
if err != nil {
return nil, err
}
agentVersion := "v0.0.1"
if connection.AgentConfig.Version != "" {
agentVersion = connection.AgentConfig.Version
}
// TODO: improve the command and set url
cliCommand := []string{"az", "stack", "sub", "create", "--name", "SigNozIntegration", "--location",
connection.AccountConfig.DeploymentRegion, "--template-uri", fmt.Sprintf("<url>%s", agentVersion),
"--action-on-unmanage", "deleteAll", "--deny-settings-mode", "denyDelete", "--parameters", fmt.Sprintf("rgName=%s", "signoz-integration-rg"),
fmt.Sprintf("rgLocation=%s", connection.AccountConfig.DeploymentRegion)}
return &integrationstypes.GettableAzureConnectionCommand{
AccountId: account.ID.String(),
AzureShellConnectionCommand: "az create",
AzureCliConnectionCommand: strings.Join(cliCommand, " "),
}, nil
}
func (a *azureProvider) UpdateServiceConfig(ctx context.Context, req *integrationstypes.PatchableServiceConfig) (any, error) {
definition, err := a.azureServiceDefinitions.GetServiceDefinition(ctx, req.ServiceId)
if err != nil {
return nil, err
}
serviceConfig := new(integrationstypes.PatchableAzureCloudServiceConfig)
err = serviceConfig.Unmarshal(req.Config)
if err != nil {
return nil, err
}
if err = serviceConfig.Config.Validate(definition); err != nil {
return nil, err
}
// can only update config for a connected cloud account id
_, err = a.accountsRepo.GetConnectedCloudAccount(
ctx, req.OrgID, a.GetName().String(), serviceConfig.CloudAccountId,
)
if err != nil {
return nil, err
}
serviceConfigBytes, err := serviceConfig.Config.Marshal()
if err != nil {
return nil, err
}
updatedConfig, err := a.serviceConfigRepo.Upsert(
ctx, req.OrgID, a.GetName().String(), serviceConfig.CloudAccountId, req.ServiceId, serviceConfigBytes,
)
if err != nil {
return nil, err
}
if err = serviceConfig.Unmarshal(updatedConfig); err != nil {
return nil, err
}
return &integrationstypes.PatchServiceConfigResponse{
ServiceId: req.ServiceId,
Config: serviceConfig,
}, nil
}
func (a *azureProvider) GetAvailableDashboards(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error) {
accountRecords, err := a.accountsRepo.ListConnected(ctx, orgID.StringValue(), a.GetName().String())
if err != nil {
return nil, err
}
// for now service dashboards are only available when metrics are enabled.
servicesWithAvailableMetrics := map[string]*time.Time{}
for _, ar := range accountRecords {
if ar.AccountID == nil {
continue
}
configsBySvcId, err := a.serviceConfigRepo.GetAllForAccount(ctx, orgID.StringValue(), ar.ID.StringValue())
if err != nil {
return nil, err
}
for svcId, config := range configsBySvcId {
serviceConfig := new(integrationstypes.AzureCloudServiceConfig)
err = serviceConfig.Unmarshal(config)
if err != nil {
return nil, err
}
if serviceConfig.Metrics != nil {
for _, metric := range serviceConfig.Metrics {
if metric.Enabled {
servicesWithAvailableMetrics[svcId] = &ar.CreatedAt
break
}
}
}
}
}
svcDashboards := make([]*dashboardtypes.Dashboard, 0)
allServices, err := a.azureServiceDefinitions.ListServiceDefinitions(ctx)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to list azure service definitions")
}
for _, svc := range allServices {
serviceDashboardsCreatedAt := servicesWithAvailableMetrics[svc.Id]
if serviceDashboardsCreatedAt != nil {
svcDashboards = integrationstypes.GetDashboardsFromAssets(svc.Id, a.GetName(), serviceDashboardsCreatedAt, svc.Assets)
servicesWithAvailableMetrics[svc.Id] = nil
}
}
return svcDashboards, nil
}
func (a *azureProvider) GetDashboard(ctx context.Context, req *integrationstypes.GettableDashboard) (*dashboardtypes.Dashboard, error) {
allDashboards, err := a.GetAvailableDashboards(ctx, req.OrgID)
if err != nil {
return nil, err
}
for _, dashboard := range allDashboards {
if dashboard.ID == req.ID {
return dashboard, nil
}
}
return nil, errors.NewNotFoundf(CodeDashboardNotFound, "dashboard with id %s not found", req.ID)
}
func (a *azureProvider) UpdateAccountConfig(ctx context.Context, req *integrationstypes.PatchableAccountConfig) (any, error) {
config := new(integrationstypes.PatchableAzureAccountConfig)
err := config.Unmarshal(req.Data)
if err != nil {
return nil, err
}
if config.Config == nil || len(config.Config.EnabledResourceGroups) < 1 {
return nil, errors.NewInvalidInputf(CodeInvalidAzureRegion, "azure region and resource groups must be provided")
}
// for azure, preserve deployment region if already set
account, err := a.accountsRepo.Get(ctx, req.OrgID, a.GetName().String(), req.AccountId)
if err != nil {
return nil, err
}
storedConfig := new(integrationstypes.AzureAccountConfig)
err = storedConfig.Unmarshal(account.Config)
if err != nil {
return nil, err
}
if account.Config != nil {
config.Config.DeploymentRegion = storedConfig.DeploymentRegion
}
configBytes, err := config.Config.Marshal()
if err != nil {
return nil, err
}
accountRecord, err := a.accountsRepo.Upsert(
ctx, req.OrgID, a.GetName().String(), &req.AccountId, configBytes, nil, nil, nil,
)
if err != nil {
return nil, err
}
return accountRecord.Account(a.GetName()), nil
}
func (a *azureProvider) DisconnectAccount(ctx context.Context, orgID, accountID string) (*integrationstypes.CloudIntegration, error) {
account, err := a.accountsRepo.Get(ctx, orgID, a.GetName().String(), accountID)
if err != nil {
return nil, err
}
tsNow := time.Now()
account, err = a.accountsRepo.Upsert(
ctx, orgID, a.GetName().String(), &accountID, nil, nil, nil, &tsNow,
)
if err != nil {
return nil, err
}
return account, nil
}

View File

@@ -1 +1,94 @@
package cloudintegrations
import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
"github.com/SigNoz/signoz/pkg/types"
)
type ServiceSummary struct {
services.Metadata
Config *types.CloudServiceConfig `json:"config"`
}
type ServiceDetails struct {
services.Definition
Config *types.CloudServiceConfig `json:"config"`
ConnectionStatus *ServiceConnectionStatus `json:"status,omitempty"`
}
type AccountStatus struct {
Integration AccountIntegrationStatus `json:"integration"`
}
type AccountIntegrationStatus struct {
LastHeartbeatTsMillis *int64 `json:"last_heartbeat_ts_ms"`
}
type LogsConfig struct {
Enabled bool `json:"enabled"`
S3Buckets map[string][]string `json:"s3_buckets,omitempty"`
}
type MetricsConfig struct {
Enabled bool `json:"enabled"`
}
type ServiceConnectionStatus struct {
Logs *SignalConnectionStatus `json:"logs"`
Metrics *SignalConnectionStatus `json:"metrics"`
}
type SignalConnectionStatus struct {
LastReceivedTsMillis int64 `json:"last_received_ts_ms"` // epoch milliseconds
LastReceivedFrom string `json:"last_received_from"` // resource identifier
}
type CompiledCollectionStrategy = services.CollectionStrategy
func NewCompiledCollectionStrategy(provider string) (*CompiledCollectionStrategy, error) {
if provider == "aws" {
return &CompiledCollectionStrategy{
Provider: "aws",
AWSMetrics: &services.AWSMetricsStrategy{},
AWSLogs: &services.AWSLogsStrategy{},
}, nil
}
return nil, errors.NewNotFoundf(services.CodeUnsupportedCloudProvider, "unsupported cloud provider: %s", provider)
}
// Helper for accumulating strategies for enabled services.
func AddServiceStrategy(serviceType string, cs *CompiledCollectionStrategy,
definitionStrat *services.CollectionStrategy, config *types.CloudServiceConfig) error {
if definitionStrat.Provider != cs.Provider {
return errors.NewInternalf(CodeMismatchCloudProvider, "can't add %s service strategy to compiled strategy for %s",
definitionStrat.Provider, cs.Provider)
}
if cs.Provider == "aws" {
if config.Logs != nil && config.Logs.Enabled {
if serviceType == services.S3Sync {
// S3 bucket sync; no CloudWatch logs are appended for this service type.
// The definition is still populated with a custom CloudWatch log group, which helps in calculating the logs connection status.
cs.S3Buckets = config.Logs.S3Buckets
} else if definitionStrat.AWSLogs != nil { // services that include a logs subscription
cs.AWSLogs.Subscriptions = append(
cs.AWSLogs.Subscriptions,
definitionStrat.AWSLogs.Subscriptions...,
)
}
}
if config.Metrics != nil && config.Metrics.Enabled && definitionStrat.AWSMetrics != nil {
cs.AWSMetrics.StreamFilters = append(
cs.AWSMetrics.StreamFilters,
definitionStrat.AWSMetrics.StreamFilters...,
)
}
return nil
}
return errors.NewNotFoundf(services.CodeUnsupportedCloudProvider, "unsupported cloud provider: %s", cs.Provider)
}

View File

@@ -1,26 +0,0 @@
package cloudintegrations
import (
"log/slog"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/implawsprovider"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/implazureprovider"
integrationstore "github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/store"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
)
func NewCloudProviderRegistry(logger *slog.Logger, store sqlstore.SQLStore, querier querier.Querier) map[integrationstypes.CloudProviderType]integrationstypes.CloudProvider {
registry := make(map[integrationstypes.CloudProviderType]integrationstypes.CloudProvider)
accountsRepo := integrationstore.NewCloudProviderAccountsRepository(store)
serviceConfigRepo := integrationstore.NewServiceConfigRepository(store)
awsProviderImpl := implawsprovider.NewAWSCloudProvider(logger, accountsRepo, serviceConfigRepo, querier)
registry[integrationstypes.CloudProviderAWS] = awsProviderImpl
azureProviderImpl := implazureprovider.NewAzureCloudProvider(logger, accountsRepo, serviceConfigRepo, querier)
registry[integrationstypes.CloudProviderAzure] = azureProviderImpl
return registry
}

View File

@@ -1,63 +1,64 @@
package store
package cloudintegrations
import (
"context"
"database/sql"
"fmt"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
var (
CodeServiceConfigNotFound = errors.MustNewCode("service_config_not_found")
)
type ServiceConfigDatabase interface {
Get(
get(
ctx context.Context,
orgID string,
cloudAccountId string,
serviceType string,
) ([]byte, error)
) (*types.CloudServiceConfig, *model.ApiError)
Upsert(
upsert(
ctx context.Context,
orgID string,
cloudProvider string,
cloudAccountId string,
serviceId string,
config []byte,
) ([]byte, error)
config types.CloudServiceConfig,
) (*types.CloudServiceConfig, *model.ApiError)
GetAllForAccount(
getAllForAccount(
ctx context.Context,
orgID string,
cloudAccountId string,
) (
map[string][]byte,
error,
configsBySvcId map[string]*types.CloudServiceConfig,
apiErr *model.ApiError,
)
}
func NewServiceConfigRepository(store sqlstore.SQLStore) ServiceConfigDatabase {
return &serviceConfigSQLRepository{store: store}
func newServiceConfigRepository(store sqlstore.SQLStore) (
*serviceConfigSQLRepository, error,
) {
return &serviceConfigSQLRepository{
store: store,
}, nil
}
type serviceConfigSQLRepository struct {
store sqlstore.SQLStore
}
func (r *serviceConfigSQLRepository) Get(
func (r *serviceConfigSQLRepository) get(
ctx context.Context,
orgID string,
cloudAccountId string,
serviceType string,
) ([]byte, error) {
var result integrationstypes.CloudIntegrationService
) (*types.CloudServiceConfig, *model.ApiError) {
var result types.CloudIntegrationService
err := r.store.BunDB().NewSelect().
Model(&result).
@@ -66,30 +67,36 @@ func (r *serviceConfigSQLRepository) Get(
Where("ci.id = ?", cloudAccountId).
Where("cis.type = ?", serviceType).
Scan(ctx)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return nil, errors.WrapNotFoundf(err, CodeServiceConfigNotFound, "couldn't find config for cloud account %s", cloudAccountId)
}
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query cloud service config")
if err == sql.ErrNoRows {
return nil, model.NotFoundError(fmt.Errorf(
"couldn't find config for cloud account %s",
cloudAccountId,
))
} else if err != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't query cloud service config: %w", err,
))
}
return result.Config, nil
return &result.Config, nil
}
func (r *serviceConfigSQLRepository) Upsert(
func (r *serviceConfigSQLRepository) upsert(
ctx context.Context,
orgID string,
cloudProvider string,
cloudAccountId string,
serviceId string,
config []byte,
) ([]byte, error) {
config types.CloudServiceConfig,
) (*types.CloudServiceConfig, *model.ApiError) {
// get cloud integration id from account id
// if the account is not connected, we don't need to upsert the config
var cloudIntegrationId string
err := r.store.BunDB().NewSelect().
Model((*integrationstypes.CloudIntegration)(nil)).
Model((*types.CloudIntegration)(nil)).
Column("id").
Where("provider = ?", cloudProvider).
Where("account_id = ?", cloudAccountId).
@@ -97,18 +104,14 @@ func (r *serviceConfigSQLRepository) Upsert(
Where("removed_at is NULL").
Where("last_agent_report is not NULL").
Scan(ctx, &cloudIntegrationId)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return nil, errors.WrapNotFoundf(
err,
CodeCloudIntegrationAccountNotFound,
"couldn't find active cloud integration account",
)
}
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query cloud integration id")
return nil, model.InternalError(fmt.Errorf(
"couldn't query cloud integration id: %w", err,
))
}
serviceConfig := integrationstypes.CloudIntegrationService{
serviceConfig := types.CloudIntegrationService{
Identifiable: types.Identifiable{ID: valuer.GenerateUUID()},
TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
@@ -123,18 +126,21 @@ func (r *serviceConfigSQLRepository) Upsert(
On("conflict(cloud_integration_id, type) do update set config=excluded.config, updated_at=excluded.updated_at").
Exec(ctx)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't upsert cloud service config")
return nil, model.InternalError(fmt.Errorf(
"could not upsert cloud service config: %w", err,
))
}
return config, nil
return &serviceConfig.Config, nil
}
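The insert above relies on bun's ON CONFLICT support so that writing a config again for the same (cloud_integration_id, type) pair updates the existing row instead of failing. A minimal sketch of that upsert shape against a hypothetical stand-in row type, not the real types.CloudIntegrationService model:

package example

import (
    "context"

    "github.com/uptrace/bun"
)

// serviceConfigRow is a stand-in for the real model, kept small for illustration.
type serviceConfigRow struct {
    bun.BaseModel `bun:"table:cloud_integration_service"`

    CloudIntegrationID string `bun:"cloud_integration_id"`
    Type               string `bun:"type"`
    Config             string `bun:"config"`
}

// upsertConfig inserts the row, overwriting config on a (cloud_integration_id, type) conflict.
func upsertConfig(ctx context.Context, db *bun.DB, row *serviceConfigRow) error {
    _, err := db.NewInsert().
        Model(row).
        On("CONFLICT (cloud_integration_id, type) DO UPDATE").
        Set("config = EXCLUDED.config").
        Exec(ctx)
    return err
}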
func (r *serviceConfigSQLRepository) GetAllForAccount(
func (r *serviceConfigSQLRepository) getAllForAccount(
ctx context.Context,
orgID string,
cloudAccountId string,
) (map[string][]byte, error) {
var serviceConfigs []integrationstypes.CloudIntegrationService
) (map[string]*types.CloudServiceConfig, *model.ApiError) {
serviceConfigs := []types.CloudIntegrationService{}
err := r.store.BunDB().NewSelect().
Model(&serviceConfigs).
@@ -143,13 +149,15 @@ func (r *serviceConfigSQLRepository) GetAllForAccount(
Where("ci.org_id = ?", orgID).
Scan(ctx)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query service configs from db")
return nil, model.InternalError(fmt.Errorf(
"could not query service configs from db: %w", err,
))
}
result := make(map[string][]byte)
result := map[string]*types.CloudServiceConfig{}
for _, r := range serviceConfigs {
result[r.Type] = r.Config
result[r.Type] = &r.Config
}
return result, nil

View File

@@ -7,24 +7,6 @@
"metrics": true,
"logs": false
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_ApplicationELB_ConsumedLCUs_count",
"attributes": []
},
{
"key": "aws_ApplicationELB_ProcessedBytes_sum",
"attributes": []
}
]
}
]
},
"data_collected": {
"metrics": [
{

View File

@@ -7,75 +7,6 @@
"metrics": true,
"logs": true
},
"ingestion_status_check": {
"metrics": [
{
"category": "rest_api",
"display_name": "REST API Metrics",
"checks": [
{
"key": "aws_ApiGateway_Count_count",
"attributes": [
{
"name": "ApiName",
"operator": "EXISTS",
"value": ""
}
]
}
]
},
{
"category": "http_api",
"display_name": "HTTP API Metrics",
"checks": [
{
"key": "aws_ApiGateway_Count_count",
"attributes": [
{
"name": "ApiId",
"operator": "EXISTS",
"value": ""
}
]
}
]
},
{
"category": "websocket_api",
"display_name": "Websocket API Metrics",
"checks": [
{
"key": "aws_ApiGateway_Count_count",
"attributes": [
{
"name": "ApiId",
"operator": "EXISTS",
"value": ""
}
]
}
]
}
],
"logs": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"attributes": [
{
"name": "aws.cloudwatch.log_group_name",
"operator": "ILIKE",
"value": "API-Gateway%"
}
]
}
]
}
]
},
"data_collected": {
"metrics": [
{
@@ -217,146 +148,6 @@
"name": "aws_ApiGateway_Latency_sum",
"unit": "Milliseconds",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_4xx_sum",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_4xx_max",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_4xx_min",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_4xx_count",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_5xx_sum",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_5xx_max",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_5xx_min",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_5xx_count",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_DataProcessed_sum",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_DataProcessed_max",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_DataProcessed_min",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_DataProcessed_count",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ExecutionError_sum",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ExecutionError_max",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ExecutionError_min",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ExecutionError_count",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ClientError_sum",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ClientError_max",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ClientError_min",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ClientError_count",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_IntegrationError_sum",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_IntegrationError_max",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_IntegrationError_min",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_IntegrationError_count",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ConnectCount_sum",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ConnectCount_max",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ConnectCount_min",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ConnectCount_count",
"unit": "Count",
"type": "Gauge"
}
],
"logs": [

View File

@@ -7,24 +7,6 @@
"metrics": true,
"logs": false
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_DynamoDB_AccountMaxReads_max",
"attributes": []
},
{
"key": "aws_DynamoDB_AccountProvisionedReadCapacityUtilization_max",
"attributes": []
}
]
}
]
},
"data_collected": {
"metrics": [
{
@@ -409,4 +391,4 @@
}
]
}
}
}

View File

@@ -7,24 +7,6 @@
"metrics": true,
"logs": false
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_EC2_CPUUtilization_max",
"attributes": []
},
{
"key": "aws_EC2_NetworkIn_max",
"attributes": []
}
]
}
]
},
"data_collected": {
"metrics": [
{
@@ -533,4 +515,4 @@
}
]
}
}
}

View File

@@ -7,81 +7,6 @@
"metrics": true,
"logs": true
},
"ingestion_status_check": {
"metrics": [
{
"category": "overview",
"display_name": "Overview",
"checks": [
{
"key": "aws_ECS_CPUUtilization_max",
"attributes": []
},
{
"key": "aws_ECS_MemoryUtilization_max",
"attributes": []
}
]
},
{
"category": "containerinsights",
"display_name": "Container Insights",
"checks": [
{
"key": "aws_ECS_ContainerInsights_NetworkRxBytes_max",
"attributes": []
},
{
"key": "aws_ECS_ContainerInsights_StorageReadBytes_max",
"attributes": []
}
]
},
{
"category": "enhanced_containerinsights",
"display_name": "Enhanced Container Insights",
"checks": [
{
"key": "aws_ECS_ContainerInsights_ContainerCpuUtilization_max",
"attributes": [
{
"name": "TaskId",
"operator": "EXISTS",
"value": ""
}
]
},
{
"key": "aws_ECS_ContainerInsights_TaskMemoryUtilization_max",
"attributes": [
{
"name": "TaskId",
"operator": "EXISTS",
"value": ""
}
]
}
]
}
],
"logs": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"attributes": [
{
"name": "aws.cloudwatch.log_group_name",
"operator": "ILIKE",
"value": "%/ecs/%"
}
]
}
]
}
]
},
"data_collected": {
"metrics": [
{

View File

@@ -7,20 +7,6 @@
"metrics": true,
"logs": false
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_ElastiCache_CacheHitRate_max",
"attributes": []
}
]
}
]
},
"data_collected": {
"metrics":[
{
@@ -1942,7 +1928,7 @@
"unit": "Percent",
"type": "Gauge",
"description": ""
}
}
]
},
"telemetry_collection_strategy": {
@@ -1965,4 +1951,4 @@
}
]
}
}
}

View File

@@ -7,37 +7,6 @@
"metrics": true,
"logs": true
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_Lambda_Invocations_sum",
"attributes": []
}
]
}
],
"logs": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"attributes": [
{
"name": "aws.cloudwatch.log_group_name",
"operator": "ILIKE",
"value": "/aws/lambda%"
}
]
}
]
}
]
},
"data_collected": {
"metrics": [
{

View File

@@ -7,20 +7,6 @@
"metrics": true,
"logs": false
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_Kafka_KafkaDataLogsDiskUsed_max",
"attributes": []
}
]
}
]
},
"data_collected": {
"metrics": [
{
@@ -1102,3 +1088,4 @@
]
}
}

View File

@@ -7,37 +7,6 @@
"metrics": true,
"logs": true
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_RDS_CPUUtilization_max",
"attributes": []
}
]
}
],
"logs": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"attributes": [
{
"name": "resources.aws.cloudwatch.log_group_name",
"operator": "ILIKE",
"value": "/aws/rds%"
}
]
}
]
}
]
},
"data_collected": {
"metrics": [
{
@@ -831,4 +800,4 @@
}
]
}
}
}

View File

@@ -7,20 +7,6 @@
"metrics": true,
"logs": false
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_SNS_NumberOfMessagesPublished_sum",
"attributes": []
}
]
}
]
},
"data_collected": {
"metrics": [
{
@@ -141,4 +127,4 @@
}
]
}
}
}

View File

@@ -7,24 +7,6 @@
"metrics": true,
"logs": false
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_SQS_SentMessageSize_max",
"attributes": []
},
{
"key": "aws_SQS_NumberOfMessagesSent_sum",
"attributes": []
}
]
}
]
},
"data_collected": {
"metrics": [
{
@@ -265,4 +247,4 @@
}
]
}
}
}

View File

@@ -1 +0,0 @@
<svg id="f2f04349-8aee-4413-84c9-a9053611b319" xmlns="http://www.w3.org/2000/svg" width="18" height="18" viewBox="0 0 18 18"><defs><linearGradient id="ad4c4f96-09aa-4f91-ba10-5cb8ad530f74" x1="9" y1="15.83" x2="9" y2="5.79" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="#b3b3b3" /><stop offset="0.26" stop-color="#c1c1c1" /><stop offset="1" stop-color="#e6e6e6" /></linearGradient></defs><title>Icon-storage-86</title><path d="M.5,5.79h17a0,0,0,0,1,0,0v9.48a.57.57,0,0,1-.57.57H1.07a.57.57,0,0,1-.57-.57V5.79A0,0,0,0,1,.5,5.79Z" fill="url(#ad4c4f96-09aa-4f91-ba10-5cb8ad530f74)" /><path d="M1.07,2.17H16.93a.57.57,0,0,1,.57.57V5.79a0,0,0,0,1,0,0H.5a0,0,0,0,1,0,0V2.73A.57.57,0,0,1,1.07,2.17Z" fill="#37c2b1" /><path d="M2.81,6.89H15.18a.27.27,0,0,1,.26.27v1.4a.27.27,0,0,1-.26.27H2.81a.27.27,0,0,1-.26-.27V7.16A.27.27,0,0,1,2.81,6.89Z" fill="#fff" /><path d="M2.82,9.68H15.19a.27.27,0,0,1,.26.27v1.41a.27.27,0,0,1-.26.27H2.82a.27.27,0,0,1-.26-.27V10A.27.27,0,0,1,2.82,9.68Z" fill="#37c2b1" /><path d="M2.82,12.5H15.19a.27.27,0,0,1,.26.27v1.41a.27.27,0,0,1-.26.27H2.82a.27.27,0,0,1-.26-.27V12.77A.27.27,0,0,1,2.82,12.5Z" fill="#258277" /></svg>


View File

@@ -1,63 +0,0 @@
{
"id": "blobstorage",
"title": "Blob Storage",
"icon": "file://icon.svg",
"overview": "file://overview.md",
"supported_signals": {
"metrics": true,
"logs": true
},
"data_collected": {
"metrics": [
{
"name": "placeholder_metric_1",
"unit": "Percent",
"type": "Gauge",
"description": ""
}
],
"logs": [
{
"name": "placeholder_log_1",
"path": "placeholder.path.value",
"type": "string"
}
]
},
"telemetry_collection_strategy": {
"azure_metrics": [
{
"category_type": "metrics",
"name": "Capacity"
},
{
"category_type": "metrics",
"name": "Transaction"
}
],
"azure_logs": [
{
"category_type": "logs",
"name": "StorageRead"
},
{
"category_type": "logs",
"name": "StorageWrite"
},
{
"category_type": "logs",
"name": "StorageDelete"
}
]
},
"assets": {
"dashboards": [
{
"id": "overview",
"title": "Blob Storage Overview",
"description": "Overview of Blob Storage",
"definition": "file://assets/dashboards/overview.json"
}
]
}
}

View File

@@ -1 +1,91 @@
package services
import (
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
)
type Metadata struct {
Id string `json:"id"`
Title string `json:"title"`
Icon string `json:"icon"`
}
type Definition struct {
Metadata
Overview string `json:"overview"` // markdown
Assets Assets `json:"assets"`
SupportedSignals SupportedSignals `json:"supported_signals"`
DataCollected DataCollected `json:"data_collected"`
Strategy *CollectionStrategy `json:"telemetry_collection_strategy"`
}
type Assets struct {
Dashboards []Dashboard `json:"dashboards"`
}
type SupportedSignals struct {
Logs bool `json:"logs"`
Metrics bool `json:"metrics"`
}
type DataCollected struct {
Logs []CollectedLogAttribute `json:"logs"`
Metrics []CollectedMetric `json:"metrics"`
}
type CollectedLogAttribute struct {
Name string `json:"name"`
Path string `json:"path"`
Type string `json:"type"`
}
type CollectedMetric struct {
Name string `json:"name"`
Type string `json:"type"`
Unit string `json:"unit"`
Description string `json:"description"`
}
type CollectionStrategy struct {
Provider string `json:"provider"`
AWSMetrics *AWSMetricsStrategy `json:"aws_metrics,omitempty"`
AWSLogs *AWSLogsStrategy `json:"aws_logs,omitempty"`
S3Buckets map[string][]string `json:"s3_buckets,omitempty"` // Only available in S3 Sync Service Type
}
type AWSMetricsStrategy struct {
// to be used as https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-metricstream.html#cfn-cloudwatch-metricstream-includefilters
StreamFilters []struct {
// json tags here are in the shape expected by AWS API as detailed at
// https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-metricstream-metricstreamfilter.html
Namespace string `json:"Namespace"`
MetricNames []string `json:"MetricNames,omitempty"`
} `json:"cloudwatch_metric_stream_filters"`
}
type AWSLogsStrategy struct {
Subscriptions []struct {
// subscribe to all log groups with the specified prefix.
// eg: `/aws/rds/`
LogGroupNamePrefix string `json:"log_group_name_prefix"`
// https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/FilterAndPatternSyntax.html
// "" implies no filtering is required.
FilterPattern string `json:"filter_pattern"`
} `json:"cloudwatch_logs_subscriptions"`
}
type Dashboard struct {
Id string `json:"id"`
Url string `json:"url"`
Title string `json:"title"`
Description string `json:"description"`
Image string `json:"image"`
Definition *dashboardtypes.StorableDashboardData `json:"definition,omitempty"`
}

View File

@@ -2,128 +2,128 @@ package services
import (
"bytes"
"context"
"embed"
"encoding/json"
"fmt"
"io/fs"
"path"
"sort"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/query-service/model"
koanfJson "github.com/knadh/koanf/parsers/json"
"golang.org/x/exp/maps"
)
const (
S3Sync = "s3sync"
)
var (
CodeServiceDefinitionNotFound = errors.MustNewCode("service_definition_not_found")
CodeUnsupportedCloudProvider = errors.MustNewCode("unsupported_cloud_provider")
CodeUnsupportedServiceType = errors.MustNewCode("unsupported_service_type")
)
type (
AWSServicesProvider struct {
definitions map[string]*integrationstypes.AWSServiceDefinition
func List(cloudProvider string) ([]Definition, *model.ApiError) {
cloudServices, found := supportedServices[cloudProvider]
if !found || cloudServices == nil {
return nil, model.NotFoundError(fmt.Errorf(
"unsupported cloud provider: %s", cloudProvider,
))
}
AzureServicesProvider struct {
definitions map[string]*integrationstypes.AzureServiceDefinition
}
)
func (a *AzureServicesProvider) ListServiceDefinitions(ctx context.Context) (map[string]*integrationstypes.AzureServiceDefinition, error) {
return a.definitions, nil
services := maps.Values(cloudServices)
sort.Slice(services, func(i, j int) bool {
return services[i].Id < services[j].Id
})
return services, nil
}
func (a *AzureServicesProvider) GetServiceDefinition(ctx context.Context, serviceName string) (*integrationstypes.AzureServiceDefinition, error) {
def, ok := a.definitions[serviceName]
if !ok {
return nil, errors.NewNotFoundf(CodeServiceDefinitionNotFound, "azure service definition not found: %s", serviceName)
}
return def, nil
}
func (a *AWSServicesProvider) ListServiceDefinitions(ctx context.Context) (map[string]*integrationstypes.AWSServiceDefinition, error) {
return a.definitions, nil
}
func (a *AWSServicesProvider) GetServiceDefinition(ctx context.Context, serviceName string) (*integrationstypes.AWSServiceDefinition, error) {
def, ok := a.definitions[serviceName]
if !ok {
return nil, errors.NewNotFoundf(CodeServiceDefinitionNotFound, "aws service definition not found: %s", serviceName)
}
return def, nil
}
func NewAWSCloudProviderServices() (*AWSServicesProvider, error) {
definitions, err := readAllServiceDefinitions(integrationstypes.CloudProviderAWS)
if err != nil {
return nil, err
}
serviceDefinitions := make(map[string]*integrationstypes.AWSServiceDefinition)
for id, def := range definitions {
typedDef, ok := def.(*integrationstypes.AWSServiceDefinition)
if !ok {
return nil, fmt.Errorf("invalid type for AWS service definition %s", id)
}
serviceDefinitions[id] = typedDef
}
return &AWSServicesProvider{
definitions: serviceDefinitions,
}, nil
}
func NewAzureCloudProviderServices() (*AzureServicesProvider, error) {
definitions, err := readAllServiceDefinitions(integrationstypes.CloudProviderAzure)
if err != nil {
return nil, err
}
serviceDefinitions := make(map[string]*integrationstypes.AzureServiceDefinition)
for id, def := range definitions {
typedDef, ok := def.(*integrationstypes.AzureServiceDefinition)
if !ok {
return nil, fmt.Errorf("invalid type for Azure service definition %s", id)
}
serviceDefinitions[id] = typedDef
}
return &AzureServicesProvider{
definitions: serviceDefinitions,
}, nil
}
// End of API. Logic for reading service definition files follows
//go:embed definitions/*
var definitionFiles embed.FS
func readAllServiceDefinitions(cloudProvider valuer.String) (map[string]any, error) {
rootDirName := "definitions"
cloudProviderDirPath := path.Join(rootDirName, cloudProvider.String())
cloudServices, err := readServiceDefinitionsFromDir(cloudProvider, cloudProviderDirPath)
if err != nil {
return nil, err
}
if len(cloudServices) < 1 {
return nil, errors.NewInternalf(errors.CodeInternal, "no service definitions found in %s", cloudProviderDirPath)
func Map(cloudProvider string) (map[string]Definition, error) {
cloudServices, found := supportedServices[cloudProvider]
if !found || cloudServices == nil {
return nil, errors.Newf(errors.TypeNotFound, CodeUnsupportedCloudProvider, "unsupported cloud provider: %s", cloudProvider)
}
return cloudServices, nil
}
func readServiceDefinitionsFromDir(cloudProvider valuer.String, cloudProviderDirPath string) (map[string]any, error) {
svcDefDirs, err := fs.ReadDir(definitionFiles, cloudProviderDirPath)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't list integrations dirs")
func GetServiceDefinition(cloudProvider, serviceType string) (*Definition, error) {
cloudServices := supportedServices[cloudProvider]
if cloudServices == nil {
return nil, errors.Newf(errors.TypeNotFound, CodeUnsupportedCloudProvider, "unsupported cloud provider: %s", cloudProvider)
}
svcDefs := make(map[string]any)
svc, exists := cloudServices[serviceType]
if !exists {
return nil, errors.Newf(errors.TypeNotFound, CodeUnsupportedServiceType, "%s service not found: %s", cloudProvider, serviceType)
}
return &svc, nil
}
// End of API. Logic for reading service definition files follows
// Service details read from ./serviceDefinitions
// { "providerName": { "service_id": {...}} }
var supportedServices map[string]map[string]Definition
func init() {
err := readAllServiceDefinitions()
if err != nil {
panic(fmt.Errorf(
"couldn't read cloud service definitions: %w", err,
))
}
}
//go:embed definitions/*
var definitionFiles embed.FS
func readAllServiceDefinitions() error {
supportedServices = map[string]map[string]Definition{}
rootDirName := "definitions"
cloudProviderDirs, err := fs.ReadDir(definitionFiles, rootDirName)
if err != nil {
return fmt.Errorf("couldn't read dirs in %s: %w", rootDirName, err)
}
for _, d := range cloudProviderDirs {
if !d.IsDir() {
continue
}
cloudProvider := d.Name()
cloudProviderDirPath := path.Join(rootDirName, cloudProvider)
cloudServices, err := readServiceDefinitionsFromDir(cloudProvider, cloudProviderDirPath)
if err != nil {
return fmt.Errorf("couldn't read %s service definitions: %w", cloudProvider, err)
}
if len(cloudServices) < 1 {
return fmt.Errorf("no %s services could be read", cloudProvider)
}
supportedServices[cloudProvider] = cloudServices
}
return nil
}
func readServiceDefinitionsFromDir(cloudProvider string, cloudProviderDirPath string) (
map[string]Definition, error,
) {
svcDefDirs, err := fs.ReadDir(definitionFiles, cloudProviderDirPath)
if err != nil {
return nil, fmt.Errorf("couldn't list integrations dirs: %w", err)
}
svcDefs := map[string]Definition{}
for _, d := range svcDefDirs {
if !d.IsDir() {
@@ -133,73 +133,103 @@ func readServiceDefinitionsFromDir(cloudProvider valuer.String, cloudProviderDir
svcDirPath := path.Join(cloudProviderDirPath, d.Name())
s, err := readServiceDefinition(cloudProvider, svcDirPath)
if err != nil {
return nil, err
return nil, fmt.Errorf("couldn't read svc definition for %s: %w", d.Name(), err)
}
_, exists := svcDefs[s.GetId()]
_, exists := svcDefs[s.Id]
if exists {
return nil, errors.NewInternalf(errors.CodeInternal, "duplicate service definition for id %s at %s", s.GetId(), d.Name())
return nil, fmt.Errorf(
"duplicate service definition for id %s at %s", s.Id, d.Name(),
)
}
svcDefs[s.GetId()] = s
svcDefs[s.Id] = *s
}
return svcDefs, nil
}
func readServiceDefinition(cloudProvider valuer.String, svcDirpath string) (integrationstypes.Definition, error) {
func readServiceDefinition(cloudProvider string, svcDirpath string) (*Definition, error) {
integrationJsonPath := path.Join(svcDirpath, "integration.json")
serializedSpec, err := definitionFiles.ReadFile(integrationJsonPath)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't read integration definition in %s", svcDirpath)
return nil, fmt.Errorf(
"couldn't find integration.json in %s: %w",
svcDirpath, err,
)
}
integrationSpec, err := koanfJson.Parser().Unmarshal(serializedSpec)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't parse integration definition in %s", svcDirpath)
return nil, fmt.Errorf(
"couldn't parse integration.json from %s: %w",
integrationJsonPath, err,
)
}
hydrated, err := integrations.HydrateFileUris(integrationSpec, definitionFiles, svcDirpath)
hydrated, err := integrations.HydrateFileUris(
integrationSpec, definitionFiles, svcDirpath,
)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't hydrate integration definition in %s", svcDirpath)
return nil, fmt.Errorf(
"couldn't hydrate files referenced in service definition %s: %w",
integrationJsonPath, err,
)
}
hydratedSpec := hydrated.(map[string]any)
var serviceDef integrationstypes.Definition
switch cloudProvider {
case integrationstypes.CloudProviderAWS:
serviceDef = &integrationstypes.AWSServiceDefinition{}
case integrationstypes.CloudProviderAzure:
serviceDef = &integrationstypes.AzureServiceDefinition{}
default:
// ideally this shouldn't happen, hence the internal error
return nil, errors.NewInternalf(errors.CodeInternal, "unsupported cloud provider: %s", cloudProvider)
serviceDef, err := ParseStructWithJsonTagsFromMap[Definition](hydratedSpec)
if err != nil {
return nil, fmt.Errorf(
"couldn't parse hydrated JSON spec read from %s: %w",
integrationJsonPath, err,
)
}
err = parseStructWithJsonTagsFromMap(hydratedSpec, serviceDef)
err = validateServiceDefinition(serviceDef)
if err != nil {
return nil, err
}
err = serviceDef.Validate()
if err != nil {
return nil, err
return nil, fmt.Errorf("invalid service definition %s: %w", serviceDef.Id, err)
}
serviceDef.Strategy.Provider = cloudProvider
return serviceDef, nil
}
func parseStructWithJsonTagsFromMap(data map[string]any, target interface{}) error {
mapJson, err := json.Marshal(data)
if err != nil {
return errors.WrapInternalf(err, errors.CodeInternal, "couldn't marshal service definition json data")
func validateServiceDefinition(s *Definition) error {
// Validate dashboard data
seenDashboardIds := map[string]interface{}{}
for _, dd := range s.Assets.Dashboards {
if _, seen := seenDashboardIds[dd.Id]; seen {
return fmt.Errorf("multiple dashboards found with id %s", dd.Id)
}
seenDashboardIds[dd.Id] = nil
}
decoder := json.NewDecoder(bytes.NewReader(mapJson))
decoder.DisallowUnknownFields()
err = decoder.Decode(target)
if err != nil {
return errors.WrapInternalf(err, errors.CodeInternal, "couldn't unmarshal service definition json data")
if s.Strategy == nil {
return fmt.Errorf("telemetry_collection_strategy is required")
}
// potentially more to follow
return nil
}
func ParseStructWithJsonTagsFromMap[StructType any](data map[string]any) (
*StructType, error,
) {
mapJson, err := json.Marshal(data)
if err != nil {
return nil, fmt.Errorf("couldn't marshal map to json: %w", err)
}
var res StructType
decoder := json.NewDecoder(bytes.NewReader(mapJson))
decoder.DisallowUnknownFields()
err = decoder.Decode(&res)
if err != nil {
return nil, fmt.Errorf("couldn't unmarshal json back to struct: %w", err)
}
return &res, nil
}
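
A minimal usage sketch for the generic helper above, assuming it is imported from this services package; dashboardRef and its values are hypothetical and exist only for illustration:

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
)

// hypothetical target struct, for illustration only
type dashboardRef struct {
	Id    string `json:"id"`
	Title string `json:"title"`
}

func main() {
	m := map[string]any{"id": "overview", "title": "RDS Overview"}
	ref, err := services.ParseStructWithJsonTagsFromMap[dashboardRef](m)
	if err != nil {
		panic(err)
	}
	fmt.Println(ref.Title) // prints: RDS Overview
}
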

View File

@@ -1,3 +1,35 @@
package services
// TODO: add more tests for services package
import (
"testing"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/stretchr/testify/require"
)
func TestAvailableServices(t *testing.T) {
require := require.New(t)
// should be able to list available services.
_, apiErr := List("bad-cloud-provider")
require.NotNil(apiErr)
require.Equal(model.ErrorNotFound, apiErr.Type())
awsSvcs, apiErr := List("aws")
require.Nil(apiErr)
require.Greater(len(awsSvcs), 0)
// should be able to get details of a service
_, err := GetServiceDefinition(
"aws", "bad-service-id",
)
require.NotNil(err)
require.True(errors.Ast(err, errors.TypeNotFound))
svc, err := GetServiceDefinition(
"aws", awsSvcs[0].Id,
)
require.Nil(err)
require.Equal(*svc, awsSvcs[0])
}

View File

@@ -6,7 +6,11 @@ import (
"database/sql"
"encoding/json"
"fmt"
"log/slog"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/modules/thirdpartyapi"
"github.com/SigNoz/signoz/pkg/queryparser"
"io"
"math"
@@ -21,18 +25,14 @@ import (
"time"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/errors"
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/thirdpartyapi"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/metricsexplorer"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/prometheus/prometheus/promql"
@@ -44,6 +44,7 @@ import (
"github.com/SigNoz/signoz/pkg/contextlinks"
traceFunnelsModule "github.com/SigNoz/signoz/pkg/modules/tracefunnel"
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/inframetrics"
queues2 "github.com/SigNoz/signoz/pkg/query-service/app/integrations/messagingQueues/queues"
"github.com/SigNoz/signoz/pkg/query-service/app/logs"
@@ -112,7 +113,7 @@ type APIHandler struct {
IntegrationsController *integrations.Controller
cloudIntegrationsRegistry map[integrationstypes.CloudProviderType]integrationstypes.CloudProvider
CloudIntegrationsController *cloudintegrations.Controller
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
@@ -162,7 +163,7 @@ type APIHandlerOpts struct {
IntegrationsController *integrations.Controller
// Cloud Provider Integrations
CloudIntegrationsRegistry map[integrationstypes.CloudProviderType]integrationstypes.CloudProvider
CloudIntegrationsController *cloudintegrations.Controller
// Log parsing pipelines
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
@@ -219,7 +220,7 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
temporalityMap: make(map[string]map[v3.Temporality]bool),
ruleManager: opts.RuleManager,
IntegrationsController: opts.IntegrationsController,
cloudIntegrationsRegistry: opts.CloudIntegrationsRegistry,
CloudIntegrationsController: opts.CloudIntegrationsController,
LogsParsingPipelineController: opts.LogsParsingPipelineController,
querier: querier,
querierV2: querierv2,
@@ -1216,22 +1217,13 @@ func (aH *APIHandler) Get(rw http.ResponseWriter, r *http.Request) {
}
dashboard := new(dashboardtypes.Dashboard)
if integrationstypes.IsCloudIntegrationDashboardUuid(id) {
cloudProvider, err := integrationstypes.GetCloudProviderFromDashboardID(id)
if err != nil {
render.Error(rw, err)
if aH.CloudIntegrationsController.IsCloudIntegrationDashboardUuid(id) {
cloudIntegrationDashboard, apiErr := aH.CloudIntegrationsController.GetDashboardById(ctx, orgID, id)
if apiErr != nil {
render.Error(rw, errorsV2.Wrapf(apiErr, errorsV2.TypeInternal, errorsV2.CodeInternal, "failed to get dashboard"))
return
}
integrationDashboard, err := aH.cloudIntegrationsRegistry[cloudProvider].GetDashboard(ctx, &integrationstypes.GettableDashboard{
ID: id,
OrgID: orgID,
})
if err != nil {
render.Error(rw, err)
return
}
dashboard = integrationDashboard
dashboard = cloudIntegrationDashboard
} else if aH.IntegrationsController.IsInstalledIntegrationDashboardID(id) {
integrationDashboard, apiErr := aH.IntegrationsController.GetInstalledIntegrationDashboardById(ctx, orgID, id)
if apiErr != nil {
@@ -1295,13 +1287,11 @@ func (aH *APIHandler) List(rw http.ResponseWriter, r *http.Request) {
dashboards = append(dashboards, installedIntegrationDashboards...)
}
for _, provider := range aH.cloudIntegrationsRegistry {
cloudIntegrationDashboards, err := provider.GetAvailableDashboards(ctx, orgID)
if err != nil {
zap.L().Error("failed to get dashboards for cloud integrations", zap.Error(apiErr))
} else {
dashboards = append(dashboards, cloudIntegrationDashboards...)
}
cloudIntegrationDashboards, apiErr := aH.CloudIntegrationsController.AvailableDashboards(ctx, orgID)
if apiErr != nil {
zap.L().Error("failed to get dashboards for cloud integrations", zap.Error(apiErr))
} else {
dashboards = append(dashboards, cloudIntegrationDashboards...)
}
gettableDashboards, err := dashboardtypes.NewGettableDashboardsFromDashboards(dashboards)
@@ -3277,15 +3267,15 @@ func (aH *APIHandler) GetIntegrationConnectionStatus(w http.ResponseWriter, r *h
lookbackSeconds = 15 * 60
}
connectionStatus, err := aH.calculateConnectionStatus(
connectionStatus, apiErr := aH.calculateConnectionStatus(
r.Context(), orgID, connectionTests, lookbackSeconds,
)
if err != nil {
render.Error(w, err)
if apiErr != nil {
RespondError(w, apiErr, "Failed to calculate integration connection status")
return
}
render.Success(w, http.StatusOK, connectionStatus)
aH.Respond(w, connectionStatus)
}
func (aH *APIHandler) calculateConnectionStatus(
@@ -3293,11 +3283,10 @@ func (aH *APIHandler) calculateConnectionStatus(
orgID valuer.UUID,
connectionTests *integrations.IntegrationConnectionTests,
lookbackSeconds int64,
) (*integrations.IntegrationConnectionStatus, error) {
) (*integrations.IntegrationConnectionStatus, *model.ApiError) {
// Calculate connection status for signals in parallel
result := &integrations.IntegrationConnectionStatus{}
// TODO: migrate to errors package
errors := []*model.ApiError{}
var resultLock sync.Mutex
@@ -3495,14 +3484,12 @@ func (aH *APIHandler) UninstallIntegration(w http.ResponseWriter, r *http.Reques
aH.Respond(w, map[string]interface{}{})
}
// RegisterCloudIntegrationsRoutes registers routes for cloud provider integrations
// cloud provider integrations
func (aH *APIHandler) RegisterCloudIntegrationsRoutes(router *mux.Router, am *middleware.AuthZ) {
subRouter := router.PathPrefix("/api/v1/cloud-integrations").Subrouter()
subRouter.Use(middleware.NewRecovery(aH.Signoz.Instrumentation.Logger()).Wrap)
subRouter.HandleFunc(
"/{cloudProvider}/accounts/generate-connection-url", am.EditAccess(aH.CloudIntegrationsGenerateConnectionArtifact),
"/{cloudProvider}/accounts/generate-connection-url", am.EditAccess(aH.CloudIntegrationsGenerateConnectionUrl),
).Methods(http.MethodPost)
subRouter.HandleFunc(
@@ -3536,199 +3523,170 @@ func (aH *APIHandler) RegisterCloudIntegrationsRoutes(router *mux.Router, am *mi
subRouter.HandleFunc(
"/{cloudProvider}/services/{serviceId}/config", am.EditAccess(aH.CloudIntegrationsUpdateServiceConfig),
).Methods(http.MethodPost)
}
func (aH *APIHandler) CloudIntegrationsGenerateConnectionArtifact(w http.ResponseWriter, r *http.Request) {
cloudProviderString := mux.Vars(r)["cloudProvider"]
func (aH *APIHandler) CloudIntegrationsListConnectedAccounts(
w http.ResponseWriter, r *http.Request,
) {
cloudProvider := mux.Vars(r)["cloudProvider"]
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
if err != nil {
render.Error(w, err)
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
if errv2 != nil {
render.Error(w, errv2)
return
}
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
resp, apiErr := aH.CloudIntegrationsController.ListConnectedAccounts(
r.Context(), claims.OrgID, cloudProvider,
)
if apiErr != nil {
RespondError(w, apiErr, nil)
return
}
reqBody, err := io.ReadAll(r.Body)
if err != nil {
render.Error(w, errors.WrapInternalf(err, errors.CodeInternal, "failed to read request body"))
return
}
resp, err := aH.cloudIntegrationsRegistry[cloudProvider].GenerateConnectionArtifact(r.Context(), &integrationstypes.PostableConnectionArtifact{
OrgID: claims.OrgID,
Data: reqBody,
})
if err != nil {
aH.Signoz.Instrumentation.Logger().ErrorContext(r.Context(),
"failed to generate connection artifact for cloud integration",
slog.String("cloudProvider", cloudProviderString),
slog.String("orgID", claims.OrgID),
)
render.Error(w, err)
return
}
render.Success(w, http.StatusOK, resp)
aH.Respond(w, resp)
}
func (aH *APIHandler) CloudIntegrationsListConnectedAccounts(w http.ResponseWriter, r *http.Request) {
cloudProviderString := mux.Vars(r)["cloudProvider"]
func (aH *APIHandler) CloudIntegrationsGenerateConnectionUrl(
w http.ResponseWriter, r *http.Request,
) {
cloudProvider := mux.Vars(r)["cloudProvider"]
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
req := cloudintegrations.GenerateConnectionUrlRequest{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
if err != nil {
render.Error(w, err)
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
if errv2 != nil {
render.Error(w, errv2)
return
}
resp, err := aH.cloudIntegrationsRegistry[cloudProvider].ListConnectedAccounts(r.Context(), claims.OrgID)
if err != nil {
render.Error(w, err)
result, apiErr := aH.CloudIntegrationsController.GenerateConnectionUrl(
r.Context(), claims.OrgID, cloudProvider, req,
)
if apiErr != nil {
RespondError(w, apiErr, nil)
return
}
render.Success(w, http.StatusOK, resp)
aH.Respond(w, result)
}
func (aH *APIHandler) CloudIntegrationsGetAccountStatus(w http.ResponseWriter, r *http.Request) {
cloudProviderString := mux.Vars(r)["cloudProvider"]
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
if err != nil {
render.Error(w, err)
return
}
func (aH *APIHandler) CloudIntegrationsGetAccountStatus(
w http.ResponseWriter, r *http.Request,
) {
cloudProvider := mux.Vars(r)["cloudProvider"]
accountId := mux.Vars(r)["accountId"]
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
if errv2 != nil {
render.Error(w, errv2)
return
}
resp, err := aH.cloudIntegrationsRegistry[cloudProvider].GetAccountStatus(r.Context(), claims.OrgID, accountId)
if err != nil {
render.Error(w, err)
resp, apiErr := aH.CloudIntegrationsController.GetAccountStatus(
r.Context(), claims.OrgID, cloudProvider, accountId,
)
if apiErr != nil {
RespondError(w, apiErr, nil)
return
}
render.Success(w, http.StatusOK, resp)
aH.Respond(w, resp)
}
func (aH *APIHandler) CloudIntegrationsAgentCheckIn(w http.ResponseWriter, r *http.Request) {
cloudProviderString := mux.Vars(r)["cloudProvider"]
func (aH *APIHandler) CloudIntegrationsAgentCheckIn(
w http.ResponseWriter, r *http.Request,
) {
cloudProvider := mux.Vars(r)["cloudProvider"]
req := cloudintegrations.AgentCheckInRequest{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
if errv2 != nil {
render.Error(w, errv2)
return
}
result, err := aH.CloudIntegrationsController.CheckInAsAgent(
r.Context(), claims.OrgID, cloudProvider, req,
)
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
if err != nil {
render.Error(w, err)
return
}
req := new(integrationstypes.PostableAgentCheckInPayload)
if err = json.NewDecoder(r.Body).Decode(req); err != nil {
render.Error(w, errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid request body"))
return
}
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
return
}
req.OrgID = claims.OrgID
resp, err := aH.cloudIntegrationsRegistry[cloudProvider].AgentCheckIn(r.Context(), req)
if err != nil {
render.Error(w, err)
return
}
render.Success(w, http.StatusOK, resp)
aH.Respond(w, result)
}
func (aH *APIHandler) CloudIntegrationsUpdateAccountConfig(w http.ResponseWriter, r *http.Request) {
cloudProviderString := mux.Vars(r)["cloudProvider"]
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
if err != nil {
render.Error(w, err)
return
}
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
return
}
func (aH *APIHandler) CloudIntegrationsUpdateAccountConfig(
w http.ResponseWriter, r *http.Request,
) {
cloudProvider := mux.Vars(r)["cloudProvider"]
accountId := mux.Vars(r)["accountId"]
reqBody, err := io.ReadAll(r.Body)
if err != nil {
render.Error(w, errors.WrapInternalf(err, errors.CodeInternal, "failed to read request body"))
req := cloudintegrations.UpdateAccountConfigRequest{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
resp, err := aH.cloudIntegrationsRegistry[cloudProvider].UpdateAccountConfig(r.Context(), &integrationstypes.PatchableAccountConfig{
OrgID: claims.OrgID,
AccountId: accountId,
Data: reqBody,
})
if err != nil {
render.Error(w, err)
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
if errv2 != nil {
render.Error(w, errv2)
return
}
render.Success(w, http.StatusOK, resp)
return
result, apiErr := aH.CloudIntegrationsController.UpdateAccountConfig(
r.Context(), claims.OrgID, cloudProvider, accountId, req,
)
if apiErr != nil {
RespondError(w, apiErr, nil)
return
}
aH.Respond(w, result)
}
func (aH *APIHandler) CloudIntegrationsDisconnectAccount(w http.ResponseWriter, r *http.Request) {
cloudProviderString := mux.Vars(r)["cloudProvider"]
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
if err != nil {
render.Error(w, err)
return
}
func (aH *APIHandler) CloudIntegrationsDisconnectAccount(
w http.ResponseWriter, r *http.Request,
) {
cloudProvider := mux.Vars(r)["cloudProvider"]
accountId := mux.Vars(r)["accountId"]
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
if errv2 != nil {
render.Error(w, errv2)
return
}
result, err := aH.cloudIntegrationsRegistry[cloudProvider].DisconnectAccount(r.Context(), claims.OrgID, accountId)
if err != nil {
render.Error(w, err)
result, apiErr := aH.CloudIntegrationsController.DisconnectAccount(
r.Context(), claims.OrgID, cloudProvider, accountId,
)
if apiErr != nil {
RespondError(w, apiErr, nil)
return
}
render.Success(w, http.StatusOK, result)
aH.Respond(w, result)
}
func (aH *APIHandler) CloudIntegrationsListServices(w http.ResponseWriter, r *http.Request) {
cloudProviderString := mux.Vars(r)["cloudProvider"]
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
if err != nil {
render.Error(w, err)
return
}
func (aH *APIHandler) CloudIntegrationsListServices(
w http.ResponseWriter, r *http.Request,
) {
cloudProvider := mux.Vars(r)["cloudProvider"]
var cloudAccountId *string
@@ -3737,22 +3695,26 @@ func (aH *APIHandler) CloudIntegrationsListServices(w http.ResponseWriter, r *ht
cloudAccountId = &cloudAccountIdQP
}
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
if errv2 != nil {
render.Error(w, errv2)
return
}
resp, err := aH.cloudIntegrationsRegistry[cloudProvider].ListServices(r.Context(), claims.OrgID, cloudAccountId)
if err != nil {
render.Error(w, err)
resp, apiErr := aH.CloudIntegrationsController.ListServices(
r.Context(), claims.OrgID, cloudProvider, cloudAccountId,
)
if apiErr != nil {
RespondError(w, apiErr, nil)
return
}
render.Success(w, http.StatusOK, resp)
aH.Respond(w, resp)
}
func (aH *APIHandler) CloudIntegrationsGetServiceDetails(w http.ResponseWriter, r *http.Request) {
func (aH *APIHandler) CloudIntegrationsGetServiceDetails(
w http.ResponseWriter, r *http.Request,
) {
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
@@ -3764,14 +3726,7 @@ func (aH *APIHandler) CloudIntegrationsGetServiceDetails(w http.ResponseWriter,
return
}
cloudProviderString := mux.Vars(r)["cloudProvider"]
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
if err != nil {
render.Error(w, err)
return
}
cloudProvider := mux.Vars(r)["cloudProvider"]
serviceId := mux.Vars(r)["serviceId"]
var cloudAccountId *string
@@ -3781,59 +3736,270 @@ func (aH *APIHandler) CloudIntegrationsGetServiceDetails(w http.ResponseWriter,
cloudAccountId = &cloudAccountIdQP
}
resp, err := aH.cloudIntegrationsRegistry[cloudProvider].GetServiceDetails(r.Context(), &integrationstypes.GetServiceDetailsReq{
OrgID: orgID.String(),
ServiceId: serviceId,
CloudAccountID: cloudAccountId,
})
if err != nil {
render.Error(w, err)
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
if errv2 != nil {
render.Error(w, errv2)
return
}
render.Success(w, http.StatusOK, resp)
return
}
func (aH *APIHandler) CloudIntegrationsUpdateServiceConfig(w http.ResponseWriter, r *http.Request) {
cloudProviderString := mux.Vars(r)["cloudProvider"]
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
if err != nil {
render.Error(w, err)
return
}
serviceId := mux.Vars(r)["serviceId"]
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
return
}
reqBody, err := io.ReadAll(r.Body)
if err != nil {
render.Error(w, errors.WrapInternalf(err,
errors.CodeInternal,
"failed to read update service config request body",
))
return
}
result, err := aH.cloudIntegrationsRegistry[cloudProvider].UpdateServiceConfig(
r.Context(), &integrationstypes.PatchableServiceConfig{
OrgID: claims.OrgID,
ServiceId: serviceId,
Config: reqBody,
},
resp, err := aH.CloudIntegrationsController.GetServiceDetails(
r.Context(), claims.OrgID, cloudProvider, serviceId, cloudAccountId,
)
if err != nil {
render.Error(w, err)
return
}
render.Success(w, http.StatusOK, result)
// Add connection status for the 2 signals.
if cloudAccountId != nil {
connStatus, apiErr := aH.calculateCloudIntegrationServiceConnectionStatus(
r.Context(), orgID, cloudProvider, *cloudAccountId, resp,
)
if apiErr != nil {
RespondError(w, apiErr, nil)
return
}
resp.ConnectionStatus = connStatus
}
aH.Respond(w, resp)
}
func (aH *APIHandler) calculateCloudIntegrationServiceConnectionStatus(
ctx context.Context,
orgID valuer.UUID,
cloudProvider string,
cloudAccountId string,
svcDetails *cloudintegrations.ServiceDetails,
) (*cloudintegrations.ServiceConnectionStatus, *model.ApiError) {
if cloudProvider != "aws" {
// TODO(Raj): Make connection check generic for all providers in a follow up change
return nil, model.BadRequest(
fmt.Errorf("unsupported cloud provider: %s", cloudProvider),
)
}
telemetryCollectionStrategy := svcDetails.Strategy
if telemetryCollectionStrategy == nil {
return nil, model.InternalError(fmt.Errorf(
"service doesn't have telemetry collection strategy: %s", svcDetails.Id,
))
}
result := &cloudintegrations.ServiceConnectionStatus{}
errors := []*model.ApiError{}
var resultLock sync.Mutex
var wg sync.WaitGroup
// Calculate metrics connection status
if telemetryCollectionStrategy.AWSMetrics != nil {
wg.Add(1)
go func() {
defer wg.Done()
metricsConnStatus, apiErr := aH.calculateAWSIntegrationSvcMetricsConnectionStatus(
ctx, cloudAccountId, telemetryCollectionStrategy.AWSMetrics, svcDetails.DataCollected.Metrics,
)
resultLock.Lock()
defer resultLock.Unlock()
if apiErr != nil {
errors = append(errors, apiErr)
} else {
result.Metrics = metricsConnStatus
}
}()
}
// Calculate logs connection status
if telemetryCollectionStrategy.AWSLogs != nil {
wg.Add(1)
go func() {
defer wg.Done()
logsConnStatus, apiErr := aH.calculateAWSIntegrationSvcLogsConnectionStatus(
ctx, orgID, cloudAccountId, telemetryCollectionStrategy.AWSLogs,
)
resultLock.Lock()
defer resultLock.Unlock()
if apiErr != nil {
errors = append(errors, apiErr)
} else {
result.Logs = logsConnStatus
}
}()
}
wg.Wait()
if len(errors) > 0 {
return nil, errors[0]
}
return result, nil
}
func (aH *APIHandler) calculateAWSIntegrationSvcMetricsConnectionStatus(
ctx context.Context,
cloudAccountId string,
strategy *services.AWSMetricsStrategy,
metricsCollectedBySvc []services.CollectedMetric,
) (*cloudintegrations.SignalConnectionStatus, *model.ApiError) {
if strategy == nil || len(strategy.StreamFilters) < 1 {
return nil, nil
}
expectedLabelValues := map[string]string{
"cloud_provider": "aws",
"cloud_account_id": cloudAccountId,
}
metricsNamespace := strategy.StreamFilters[0].Namespace
metricsNamespaceParts := strings.Split(metricsNamespace, "/")
if len(metricsNamespaceParts) >= 2 {
expectedLabelValues["service_namespace"] = metricsNamespaceParts[0]
expectedLabelValues["service_name"] = metricsNamespaceParts[1]
} else {
// metrics for single word namespaces like "CWAgent" do not
// have the service_namespace label populated
expectedLabelValues["service_name"] = metricsNamespaceParts[0]
}
metricNamesCollectedBySvc := []string{}
for _, cm := range metricsCollectedBySvc {
metricNamesCollectedBySvc = append(metricNamesCollectedBySvc, cm.Name)
}
statusForLastReceivedMetric, apiErr := aH.reader.GetLatestReceivedMetric(
ctx, metricNamesCollectedBySvc, expectedLabelValues,
)
if apiErr != nil {
return nil, apiErr
}
if statusForLastReceivedMetric != nil {
return &cloudintegrations.SignalConnectionStatus{
LastReceivedTsMillis: statusForLastReceivedMetric.LastReceivedTsMillis,
LastReceivedFrom: "signoz-aws-integration",
}, nil
}
return nil, nil
}
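
The namespace-to-label mapping used above can be summarised with a small sketch. expectedLabels is a hypothetical helper written only for illustration, mirroring the logic in calculateAWSIntegrationSvcMetricsConnectionStatus: "AWS/RDS" yields service_namespace="AWS" and service_name="RDS", while a single-word namespace like "CWAgent" only sets service_name.

package main

import (
	"fmt"
	"strings"
)

// expectedLabels builds the label values a received metric is expected to carry.
func expectedLabels(cloudAccountId, metricsNamespace string) map[string]string {
	labels := map[string]string{
		"cloud_provider":   "aws",
		"cloud_account_id": cloudAccountId,
	}
	parts := strings.Split(metricsNamespace, "/")
	if len(parts) >= 2 {
		labels["service_namespace"] = parts[0]
		labels["service_name"] = parts[1]
	} else {
		// single-word namespaces don't populate service_namespace
		labels["service_name"] = parts[0]
	}
	return labels
}

func main() {
	fmt.Println(expectedLabels("123456789012", "AWS/RDS"))
	fmt.Println(expectedLabels("123456789012", "CWAgent"))
}
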
func (aH *APIHandler) calculateAWSIntegrationSvcLogsConnectionStatus(
ctx context.Context,
orgID valuer.UUID,
cloudAccountId string,
strategy *services.AWSLogsStrategy,
) (*cloudintegrations.SignalConnectionStatus, *model.ApiError) {
if strategy == nil || len(strategy.Subscriptions) < 1 {
return nil, nil
}
logGroupNamePrefix := strategy.Subscriptions[0].LogGroupNamePrefix
if len(logGroupNamePrefix) < 1 {
return nil, nil
}
logsConnTestFilter := &v3.FilterSet{
Operator: "AND",
Items: []v3.FilterItem{
{
Key: v3.AttributeKey{
Key: "cloud.account.id",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeResource,
},
Operator: "=",
Value: cloudAccountId,
},
{
Key: v3.AttributeKey{
Key: "aws.cloudwatch.log_group_name",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeResource,
},
Operator: "like",
Value: logGroupNamePrefix + "%",
},
},
}
// TODO(Raj): Receive this as a param from UI in the future.
lookbackSeconds := int64(30 * 60)
qrParams := &v3.QueryRangeParamsV3{
Start: time.Now().UnixMilli() - (lookbackSeconds * 1000),
End: time.Now().UnixMilli(),
CompositeQuery: &v3.CompositeQuery{
PanelType: v3.PanelTypeList,
QueryType: v3.QueryTypeBuilder,
BuilderQueries: map[string]*v3.BuilderQuery{
"A": {
PageSize: 1,
Filters: logsConnTestFilter,
QueryName: "A",
DataSource: v3.DataSourceLogs,
Expression: "A",
AggregateOperator: v3.AggregateOperatorNoOp,
},
},
},
}
queryRes, _, err := aH.querier.QueryRange(
ctx, orgID, qrParams,
)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"could not query for integration connection status: %w", err,
))
}
if len(queryRes) > 0 && queryRes[0].List != nil && len(queryRes[0].List) > 0 {
lastLog := queryRes[0].List[0]
return &cloudintegrations.SignalConnectionStatus{
LastReceivedTsMillis: lastLog.Timestamp.UnixMilli(),
LastReceivedFrom: "signoz-aws-integration",
}, nil
}
return nil, nil
}
func (aH *APIHandler) CloudIntegrationsUpdateServiceConfig(
w http.ResponseWriter, r *http.Request,
) {
cloudProvider := mux.Vars(r)["cloudProvider"]
serviceId := mux.Vars(r)["serviceId"]
req := cloudintegrations.UpdateServiceConfigRequest{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
if errv2 != nil {
render.Error(w, errv2)
return
}
result, err := aH.CloudIntegrationsController.UpdateServiceConfig(
r.Context(), claims.OrgID, cloudProvider, serviceId, &req,
)
if err != nil {
render.Error(w, err)
return
}
aH.Respond(w, result)
}
// logs

View File

@@ -11,7 +11,6 @@ import (
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -108,7 +107,7 @@ type IntegrationsListItem struct {
type Integration struct {
IntegrationDetails
Installation *integrationstypes.InstalledIntegration `json:"installation"`
Installation *types.InstalledIntegration `json:"installation"`
}
type Manager struct {
@@ -224,7 +223,7 @@ func (m *Manager) InstallIntegration(
ctx context.Context,
orgId string,
integrationId string,
config integrationstypes.InstalledIntegrationConfig,
config types.InstalledIntegrationConfig,
) (*IntegrationsListItem, *model.ApiError) {
integrationDetails, apiErr := m.getIntegrationDetails(ctx, integrationId)
if apiErr != nil {
@@ -430,7 +429,7 @@ func (m *Manager) getInstalledIntegration(
ctx context.Context,
orgId string,
integrationId string,
) (*integrationstypes.InstalledIntegration, *model.ApiError) {
) (*types.InstalledIntegration, *model.ApiError) {
iis, apiErr := m.installedIntegrationsRepo.get(
ctx, orgId, []string{integrationId},
)
@@ -458,7 +457,7 @@ func (m *Manager) getInstalledIntegrations(
return nil, apiErr
}
installedTypes := utils.MapSlice(installations, func(i integrationstypes.InstalledIntegration) string {
installedTypes := utils.MapSlice(installations, func(i types.InstalledIntegration) string {
return i.Type
})
integrationDetails, apiErr := m.availableIntegrationsRepo.get(ctx, installedTypes)

View File

@@ -4,22 +4,22 @@ import (
"context"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/types"
)
type InstalledIntegrationsRepo interface {
list(ctx context.Context, orgId string) ([]integrationstypes.InstalledIntegration, *model.ApiError)
list(ctx context.Context, orgId string) ([]types.InstalledIntegration, *model.ApiError)
get(
ctx context.Context, orgId string, integrationTypes []string,
) (map[string]integrationstypes.InstalledIntegration, *model.ApiError)
) (map[string]types.InstalledIntegration, *model.ApiError)
upsert(
ctx context.Context,
orgId string,
integrationType string,
config integrationstypes.InstalledIntegrationConfig,
) (*integrationstypes.InstalledIntegration, *model.ApiError)
config types.InstalledIntegrationConfig,
) (*types.InstalledIntegration, *model.ApiError)
delete(ctx context.Context, orgId string, integrationType string) *model.ApiError
}

View File

@@ -7,7 +7,6 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/uptrace/bun"
)
@@ -27,8 +26,8 @@ func NewInstalledIntegrationsSqliteRepo(store sqlstore.SQLStore) (
func (r *InstalledIntegrationsSqliteRepo) list(
ctx context.Context,
orgId string,
) ([]integrationstypes.InstalledIntegration, *model.ApiError) {
integrations := []integrationstypes.InstalledIntegration{}
) ([]types.InstalledIntegration, *model.ApiError) {
integrations := []types.InstalledIntegration{}
err := r.store.BunDB().NewSelect().
Model(&integrations).
@@ -45,8 +44,8 @@ func (r *InstalledIntegrationsSqliteRepo) list(
func (r *InstalledIntegrationsSqliteRepo) get(
ctx context.Context, orgId string, integrationTypes []string,
) (map[string]integrationstypes.InstalledIntegration, *model.ApiError) {
integrations := []integrationstypes.InstalledIntegration{}
) (map[string]types.InstalledIntegration, *model.ApiError) {
integrations := []types.InstalledIntegration{}
typeValues := []interface{}{}
for _, integrationType := range integrationTypes {
@@ -63,7 +62,7 @@ func (r *InstalledIntegrationsSqliteRepo) get(
))
}
result := map[string]integrationstypes.InstalledIntegration{}
result := map[string]types.InstalledIntegration{}
for _, ii := range integrations {
result[ii.Type] = ii
}
@@ -75,10 +74,10 @@ func (r *InstalledIntegrationsSqliteRepo) upsert(
ctx context.Context,
orgId string,
integrationType string,
config integrationstypes.InstalledIntegrationConfig,
) (*integrationstypes.InstalledIntegration, *model.ApiError) {
config types.InstalledIntegrationConfig,
) (*types.InstalledIntegration, *model.ApiError) {
integration := integrationstypes.InstalledIntegration{
integration := types.InstalledIntegration{
Identifiable: types.Identifiable{
ID: valuer.GenerateUUID(),
},
@@ -115,7 +114,7 @@ func (r *InstalledIntegrationsSqliteRepo) delete(
ctx context.Context, orgId string, integrationType string,
) *model.ApiError {
_, dbErr := r.store.BunDB().NewDelete().
Model(&integrationstypes.InstalledIntegration{}).
Model(&types.InstalledIntegration{}).
Where("type = ?", integrationType).
Where("org_id = ?", orgId).
Exec(ctx)

View File

@@ -71,7 +71,10 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
return nil, err
}
cloudIntegrationsRegistry := cloudintegrations.NewCloudProviderRegistry(signoz.Instrumentation.Logger(), signoz.SQLStore, signoz.Querier)
cloudIntegrationsController, err := cloudintegrations.NewController(signoz.SQLStore)
if err != nil {
return nil, err
}
cacheForTraceDetail, err := memorycache.New(context.TODO(), signoz.Instrumentation.ToProviderSettings(), cache.Config{
Provider: "memory",
@@ -124,7 +127,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
Reader: reader,
RuleManager: rm,
IntegrationsController: integrationsController,
CloudIntegrationsRegistry: cloudIntegrationsRegistry,
CloudIntegrationsController: cloudIntegrationsController,
LogsParsingPipelineController: logParsingPipelineController,
FluxInterval: config.Querier.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),

pkg/types/integration.go (new file, 244 lines)
View File

@@ -0,0 +1,244 @@
package types
import (
"database/sql/driver"
"encoding/json"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/uptrace/bun"
)
type IntegrationUserEmail string
const (
AWSIntegrationUserEmail IntegrationUserEmail = "aws-integration@signoz.io"
)
var AllIntegrationUserEmails = []IntegrationUserEmail{
AWSIntegrationUserEmail,
}
// --------------------------------------------------------------------------
// Normal integration uses just the installed_integration table
// --------------------------------------------------------------------------
type InstalledIntegration struct {
bun.BaseModel `bun:"table:installed_integration"`
Identifiable
Type string `json:"type" bun:"type,type:text,unique:org_id_type"`
Config InstalledIntegrationConfig `json:"config" bun:"config,type:text"`
InstalledAt time.Time `json:"installed_at" bun:"installed_at,default:current_timestamp"`
OrgID string `json:"org_id" bun:"org_id,type:text,unique:org_id_type,references:organizations(id),on_delete:cascade"`
}
type InstalledIntegrationConfig map[string]interface{}
// For serializing from db
func (c *InstalledIntegrationConfig) Scan(src interface{}) error {
var data []byte
switch v := src.(type) {
case []byte:
data = v
case string:
data = []byte(v)
default:
return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
}
return json.Unmarshal(data, c)
}
// For serializing to db
func (c *InstalledIntegrationConfig) Value() (driver.Value, error) {
filterSetJson, err := json.Marshal(c)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "could not serialize integration config to JSON")
}
return filterSetJson, nil
}
// --------------------------------------------------------------------------
// Cloud integration uses the cloud_integration table
// and cloud_integrations_service table
// --------------------------------------------------------------------------
type CloudIntegration struct {
bun.BaseModel `bun:"table:cloud_integration"`
Identifiable
TimeAuditable
Provider string `json:"provider" bun:"provider,type:text,unique:provider_id"`
Config *AccountConfig `json:"config" bun:"config,type:text"`
AccountID *string `json:"account_id" bun:"account_id,type:text"`
LastAgentReport *AgentReport `json:"last_agent_report" bun:"last_agent_report,type:text"`
RemovedAt *time.Time `json:"removed_at" bun:"removed_at,type:timestamp,nullzero"`
OrgID string `bun:"org_id,type:text,unique:provider_id"`
}
func (a *CloudIntegration) Status() AccountStatus {
status := AccountStatus{}
if a.LastAgentReport != nil {
lastHeartbeat := a.LastAgentReport.TimestampMillis
status.Integration.LastHeartbeatTsMillis = &lastHeartbeat
}
return status
}
func (a *CloudIntegration) Account() Account {
ca := Account{Id: a.ID.StringValue(), Status: a.Status()}
if a.AccountID != nil {
ca.CloudAccountId = *a.AccountID
}
if a.Config != nil {
ca.Config = *a.Config
} else {
ca.Config = DefaultAccountConfig()
}
return ca
}
type Account struct {
Id string `json:"id"`
CloudAccountId string `json:"cloud_account_id"`
Config AccountConfig `json:"config"`
Status AccountStatus `json:"status"`
}
type AccountStatus struct {
Integration AccountIntegrationStatus `json:"integration"`
}
type AccountIntegrationStatus struct {
LastHeartbeatTsMillis *int64 `json:"last_heartbeat_ts_ms"`
}
func DefaultAccountConfig() AccountConfig {
return AccountConfig{
EnabledRegions: []string{},
}
}
type AccountConfig struct {
EnabledRegions []string `json:"regions"`
}
// For serializing from db
func (c *AccountConfig) Scan(src any) error {
var data []byte
switch v := src.(type) {
case []byte:
data = v
case string:
data = []byte(v)
default:
return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
}
return json.Unmarshal(data, c)
}
// For serializing to db
func (c *AccountConfig) Value() (driver.Value, error) {
if c == nil {
return nil, errors.NewInternalf(errors.CodeInternal, "cloud account config is nil")
}
serialized, err := json.Marshal(c)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't serialize cloud account config to JSON")
}
return serialized, nil
}
type AgentReport struct {
TimestampMillis int64 `json:"timestamp_millis"`
Data map[string]any `json:"data"`
}
// For serializing from db
func (r *AgentReport) Scan(src any) error {
var data []byte
switch v := src.(type) {
case []byte:
data = v
case string:
data = []byte(v)
default:
return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
}
return json.Unmarshal(data, r)
}
// For serializing to db
func (r *AgentReport) Value() (driver.Value, error) {
if r == nil {
return nil, errors.NewInternalf(errors.CodeInternal, "agent report is nil")
}
serialized, err := json.Marshal(r)
if err != nil {
return nil, errors.WrapInternalf(
err, errors.CodeInternal, "couldn't serialize agent report to JSON",
)
}
return serialized, nil
}
type CloudIntegrationService struct {
bun.BaseModel `bun:"table:cloud_integration_service,alias:cis"`
Identifiable
TimeAuditable
Type string `bun:"type,type:text,notnull,unique:cloud_integration_id_type"`
Config CloudServiceConfig `bun:"config,type:text"`
CloudIntegrationID string `bun:"cloud_integration_id,type:text,notnull,unique:cloud_integration_id_type,references:cloud_integrations(id),on_delete:cascade"`
}
type CloudServiceLogsConfig struct {
Enabled bool `json:"enabled"`
S3Buckets map[string][]string `json:"s3_buckets,omitempty"`
}
type CloudServiceMetricsConfig struct {
Enabled bool `json:"enabled"`
}
type CloudServiceConfig struct {
Logs *CloudServiceLogsConfig `json:"logs,omitempty"`
Metrics *CloudServiceMetricsConfig `json:"metrics,omitempty"`
}
// For serializing from db
func (c *CloudServiceConfig) Scan(src any) error {
var data []byte
switch src := src.(type) {
case []byte:
data = src
case string:
data = []byte(src)
default:
return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
}
return json.Unmarshal(data, c)
}
// For serializing to db
func (c *CloudServiceConfig) Value() (driver.Value, error) {
if c == nil {
return nil, errors.NewInternalf(errors.CodeInternal, "cloud service config is nil")
}
serialized, err := json.Marshal(c)
if err != nil {
return nil, errors.WrapInternalf(
err, errors.CodeInternal, "couldn't serialize cloud service config to JSON",
)
}
return serialized, nil
}
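
A minimal round-trip sketch for the Scan/Value pair above, showing how a CloudServiceConfig would be persisted as JSON text and read back; the config values are made up for illustration:

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/types"
)

func main() {
	cfg := types.CloudServiceConfig{
		Metrics: &types.CloudServiceMetricsConfig{Enabled: true},
	}

	// Serialize as the database driver would (JSON bytes for the text column).
	dbValue, err := cfg.Value()
	if err != nil {
		panic(err)
	}

	// Read it back from the stored representation.
	var decoded types.CloudServiceConfig
	if err := decoded.Scan(dbValue); err != nil {
		panic(err)
	}
	fmt.Println(decoded.Metrics.Enabled) // true
}
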

View File

@@ -1,723 +0,0 @@
package integrationstypes
import (
"context"
"database/sql/driver"
"encoding/json"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/uptrace/bun"
)
// CloudProvider defines the interface to be implemented by different cloud providers.
// This is a generic interface, so it accepts and returns generic types instead of concrete ones.
// It's the cloud provider's responsibility to cast them to the appropriate types and validate them.
type CloudProvider interface {
GetName() CloudProviderType
AgentCheckIn(ctx context.Context, req *PostableAgentCheckInPayload) (any, error)
GenerateConnectionArtifact(ctx context.Context, req *PostableConnectionArtifact) (any, error)
GetAccountStatus(ctx context.Context, orgID, accountID string) (*GettableAccountStatus, error)
ListServices(ctx context.Context, orgID string, accountID *string) (any, error) // returns either GettableAWSServices or GettableAzureServices
GetServiceDetails(ctx context.Context, req *GetServiceDetailsReq) (any, error)
ListConnectedAccounts(ctx context.Context, orgID string) (*GettableConnectedAccountsList, error)
GetDashboard(ctx context.Context, req *GettableDashboard) (*dashboardtypes.Dashboard, error)
GetAvailableDashboards(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error)
UpdateAccountConfig(ctx context.Context, req *PatchableAccountConfig) (any, error) // req can be either PatchableAWSAccountConfig or PatchableAzureAccountConfig
UpdateServiceConfig(ctx context.Context, req *PatchableServiceConfig) (any, error)
DisconnectAccount(ctx context.Context, orgID, accountID string) (*CloudIntegration, error)
}
type GettableDashboard struct {
ID string
OrgID valuer.UUID
}
type GettableCloudIntegrationConnectionParams struct {
IngestionUrl string `json:"ingestion_url,omitempty"`
IngestionKey string `json:"ingestion_key,omitempty"`
SigNozAPIUrl string `json:"signoz_api_url,omitempty"`
SigNozAPIKey string `json:"signoz_api_key,omitempty"`
}
type GettableIngestionKey struct {
Name string `json:"name"`
Value string `json:"value"`
// other attributes from gateway response not included here since they are not being used.
}
type GettableIngestionKeysSearch struct {
Status string `json:"status"`
Data []GettableIngestionKey `json:"data"`
Error string `json:"error"`
}
type GettableCreateIngestionKey struct {
Status string `json:"status"`
Data GettableIngestionKey `json:"data"`
Error string `json:"error"`
}
type GettableDeployment struct {
Name string `json:"name"`
ClusterInfo struct {
Region struct {
DNS string `json:"dns"`
} `json:"region"`
} `json:"cluster"`
}
type GettableConnectedAccountsList struct {
Accounts []*Account `json:"accounts"`
}
// SigNozAWSAgentConfig represents requirements for agent deployment in user's AWS account
type SigNozAWSAgentConfig struct {
// The region in which SigNoz agent should be installed.
Region string `json:"region"`
IngestionUrl string `json:"ingestion_url"`
IngestionKey string `json:"ingestion_key"`
SigNozAPIUrl string `json:"signoz_api_url"`
SigNozAPIKey string `json:"signoz_api_key"`
Version string `json:"version,omitempty"`
}
type PostableConnectionArtifact struct {
OrgID string
Data []byte // either PostableAWSConnectionUrl or PostableAzureConnectionCommand
}
type PostableAWSConnectionUrl struct {
// Optional. To be specified for updates.
// TODO: evaluate and remove if not needed.
AccountId *string `json:"account_id,omitempty"`
AccountConfig *AWSAccountConfig `json:"account_config"`
AgentConfig *SigNozAWSAgentConfig `json:"agent_config"`
}
type PostableAzureConnectionCommand struct {
AgentConfig *SigNozAzureAgentConfig `json:"agent_config"`
AccountConfig *AzureAccountConfig `json:"account_config"`
}
func (p *PostableAWSConnectionUrl) Unmarshal(src any) error {
var data []byte
switch src := src.(type) {
case []byte:
data = src
case string:
data = []byte(src)
default:
return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
}
err := json.Unmarshal(data, p)
if err != nil {
return errors.WrapInternalf(
err, errors.CodeInternal, "couldn't deserialize aws connection url request from JSON",
)
}
return nil
}
func (p *PostableAzureConnectionCommand) Unmarshal(src any) error {
var data []byte
switch src := src.(type) {
case []byte:
data = src
case string:
data = []byte(src)
default:
return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
}
err := json.Unmarshal(data, p)
if err != nil {
return errors.WrapInternalf(
err, errors.CodeInternal, "couldn't deserialize azure connection command request from JSON",
)
}
return nil
}
type SigNozAzureAgentConfig struct {
IngestionUrl string `json:"ingestion_url"`
IngestionKey string `json:"ingestion_key"`
SigNozAPIUrl string `json:"signoz_api_url"`
SigNozAPIKey string `json:"signoz_api_key"`
Version string `json:"version,omitempty"`
}
type GettableAWSConnectionUrl struct {
AccountId string `json:"account_id"`
ConnectionUrl string `json:"connection_url"`
}
type GettableAzureConnectionCommand struct {
AccountId string `json:"account_id"`
AzureShellConnectionCommand string `json:"az_shell_connection_command"`
AzureCliConnectionCommand string `json:"az_cli_connection_command"`
}
type GettableAccountStatus struct {
Id string `json:"id"`
CloudAccountId *string `json:"cloud_account_id,omitempty"`
Status AccountStatus `json:"status"`
}
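// PostableAgentCheckInPayload is the agent check-in request body. Per the json
// tags below, ID maps to "account_id" (the SigNoz-side integration account id)
// while AccountID maps to "cloud_account_id" (the cloud provider's account id).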
type PostableAgentCheckInPayload struct {
ID string `json:"account_id"`
AccountID string `json:"cloud_account_id"`
// Arbitrary cloud-specific agent data
Data map[string]any `json:"data,omitempty"`
OrgID string `json:"-"`
}
type GettableAWSAgentCheckIn struct {
AccountId string `json:"account_id"`
CloudAccountId string `json:"cloud_account_id"`
RemovedAt *time.Time `json:"removed_at"`
IntegrationConfig AWSAgentIntegrationConfig `json:"integration_config"`
}
type AWSAgentIntegrationConfig struct {
EnabledRegions []string `json:"enabled_regions"`
TelemetryCollectionStrategy *AWSCollectionStrategy `json:"telemetry,omitempty"`
}
type AzureAgentIntegrationConfig struct {
DeploymentRegion string `json:"deployment_region"` // will not be changed once set
EnabledResourceGroups []string `json:"resource_groups"`
TelemetryCollectionStrategy *AzureCollectionStrategy `json:"telemetry,omitempty"`
}
type GettableAzureAgentCheckIn struct {
AccountId string `json:"account_id"`
CloudAccountId string `json:"cloud_account_id"`
RemovedAt *time.Time `json:"removed_at"`
IntegrationConfig AzureAgentIntegrationConfig `json:"integration_config"`
}
type PatchableServiceConfig struct {
OrgID string `json:"org_id"`
ServiceId string `json:"service_id"`
Config []byte `json:"config"` // json serialized config
}
type PatchableAWSCloudServiceConfig struct {
CloudAccountId string `json:"cloud_account_id"`
Config *AWSCloudServiceConfig `json:"config"`
}
type AWSCloudServiceConfig struct {
Logs *AWSCloudServiceLogsConfig `json:"logs,omitempty"`
Metrics *AWSCloudServiceMetricsConfig `json:"metrics,omitempty"`
}
// Unmarshal unmarshals data from src
func (c *PatchableAWSCloudServiceConfig) Unmarshal(src []byte) error {
err := json.Unmarshal(src, c)
if err != nil {
return errors.WrapInternalf(
err, errors.CodeInternal, "couldn't deserialize aws service config req from JSON",
)
}
return nil
}
// Marshal serializes data to bytes
func (c *PatchableAWSCloudServiceConfig) Marshal() ([]byte, error) {
serialized, err := json.Marshal(c)
if err != nil {
return nil, errors.WrapInternalf(
err, errors.CodeInternal, "couldn't serialize aws service config req to JSON",
)
}
return serialized, nil
}
// Unmarshal unmarshals data from src
func (a *AWSCloudServiceConfig) Unmarshal(src []byte) error {
err := json.Unmarshal(src, a)
if err != nil {
return errors.WrapInternalf(
err, errors.CodeInternal, "couldn't deserialize cloud service config from JSON",
)
}
return nil
}
// Marshal serializes data to bytes
func (a *AWSCloudServiceConfig) Marshal() ([]byte, error) {
serialized, err := json.Marshal(a)
if err != nil {
return nil, errors.WrapInternalf(
err, errors.CodeInternal, "couldn't serialize cloud service config to JSON",
)
}
return serialized, nil
}
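// Validate checks the config against its service definition: S3 buckets may
// only be configured for the s3sync service type, and their regions must be
// valid AWS regions.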
func (a *AWSCloudServiceConfig) Validate(def *AWSServiceDefinition) error {
if def.Id != S3Sync && a.Logs != nil && a.Logs.S3Buckets != nil {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "s3 buckets can only be added to service-type[%s]", S3Sync)
} else if def.Id == S3Sync && a.Logs != nil && a.Logs.S3Buckets != nil {
for region := range a.Logs.S3Buckets {
if _, found := ValidAWSRegions[region]; !found {
return errors.NewInvalidInputf(CodeInvalidCloudRegion, "invalid cloud region: %s", region)
}
}
}
return nil
}
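// Validate ensures that every log and metric category named in the config is
// declared in the service definition's collection strategy.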
func (a *AzureCloudServiceConfig) Validate(def *AzureServiceDefinition) error {
logsMap := make(map[string]bool)
metricsMap := make(map[string]bool)
for _, log := range def.Strategy.AzureLogs {
logsMap[log.Name] = true
}
for _, metric := range def.Strategy.AzureMetrics {
metricsMap[metric.Name] = true
}
for _, log := range a.Logs {
if _, found := logsMap[log.Name]; !found {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid log name: %s", log.Name)
}
}
for _, metric := range a.Metrics {
if _, found := metricsMap[metric.Name]; !found {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid metric name: %s", metric.Name)
}
}
return nil
}
type AzureCloudServiceLogsConfig struct {
Enabled bool `json:"enabled"`
Name string `json:"name"`
}
type AzureCloudServiceMetricsConfig struct {
Enabled bool `json:"enabled"`
Name string `json:"name"`
}
type PatchServiceConfigResponse struct {
ServiceId string `json:"id"`
Config any `json:"config"`
}
type PatchableAccountConfig struct {
OrgID string
AccountId string
Data []byte // can be either AWSAccountConfig or AzureAccountConfig
}
type PatchableAWSAccountConfig struct {
Config *AWSAccountConfig `json:"config"`
}
type PatchableAzureAccountConfig struct {
Config *AzureAccountConfig `json:"config"`
}
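// Unmarshal unmarshals data from src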
func (p *PatchableAWSAccountConfig) Unmarshal(src []byte) error {
err := json.Unmarshal(src, p)
if err != nil {
return errors.WrapInternalf(
err, errors.CodeInternal, "couldn't deserialize patchable account config from JSON",
)
}
return nil
}
func (p *PatchableAWSAccountConfig) Marshal() ([]byte, error) {
if p == nil {
return nil, errors.NewInternalf(errors.CodeInternal, "patchable account config is nil")
}
serialized, err := json.Marshal(p)
if err != nil {
return nil, errors.WrapInternalf(
err, errors.CodeInternal, "couldn't serialize patchable account config to JSON",
)
}
return serialized, nil
}
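// Unmarshal unmarshals data from src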
func (p *PatchableAzureAccountConfig) Unmarshal(src []byte) error {
err := json.Unmarshal(src, p)
if err != nil {
return errors.WrapInternalf(
err, errors.CodeInternal, "couldn't deserialize patchable account config from JSON",
)
}
return nil
}
func (p *PatchableAzureAccountConfig) Marshal() ([]byte, error) {
if p == nil {
return nil, errors.NewInternalf(errors.CodeInternal, "patchable account config is nil")
}
serialized, err := json.Marshal(p)
if err != nil {
return nil, errors.WrapInternalf(
err, errors.CodeInternal, "couldn't serialize patchable account config to JSON",
)
}
return serialized, nil
}
type AWSAccountConfig struct {
EnabledRegions []string `json:"regions"`
}
type AzureAccountConfig struct {
DeploymentRegion string `json:"deployment_region,omitempty"`
EnabledResourceGroups []string `json:"resource_groups,omitempty"`
}
// Unmarshal unmarshals data from src
func (c *AWSAccountConfig) Unmarshal(src []byte) error {
err := json.Unmarshal(src, c)
if err != nil {
return errors.WrapInternalf(
err, errors.CodeInternal, "couldn't deserialize AWS account config from JSON",
)
}
return nil
}
// Marshal serializes data to bytes
func (c *AWSAccountConfig) Marshal() ([]byte, error) {
if c == nil {
return nil, errors.NewInternalf(errors.CodeInternal, "cloud account config is nil")
}
serialized, err := json.Marshal(c)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't serialize cloud account config to JSON")
}
return serialized, nil
}
// Unmarshal unmarshals data from src
func (c *AzureAccountConfig) Unmarshal(src []byte) error {
err := json.Unmarshal(src, c)
if err != nil {
return errors.WrapInternalf(
err, errors.CodeInternal, "couldn't deserialize azure account config from JSON",
)
}
return nil
}
// Marshal serializes data to bytes
func (c *AzureAccountConfig) Marshal() ([]byte, error) {
if c == nil {
return nil, errors.NewInternalf(errors.CodeInternal, "cloud account config is nil")
}
serialized, err := json.Marshal(c)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't serialize cloud account config to JSON")
}
return serialized, nil
}
type GettableAWSServices struct {
Services []AWSServiceSummary `json:"services"`
}
type GettableAzureServices struct {
Services []AzureServiceSummary `json:"services"`
}
type PatchableAzureCloudServiceConfig struct {
CloudAccountId string `json:"cloud_account_id"`
Config *AzureCloudServiceConfig `json:"config"`
}
type AzureCloudServiceConfig struct {
Logs []*AzureCloudServiceLogsConfig `json:"logs,omitempty"`
Metrics []*AzureCloudServiceMetricsConfig `json:"metrics,omitempty"`
}
// Unmarshal unmarshals data from src
func (c *PatchableAzureCloudServiceConfig) Unmarshal(src []byte) error {
err := json.Unmarshal(src, c)
if err != nil {
return errors.WrapInternalf(
err, errors.CodeInternal, "couldn't deserialize cloud service config from JSON",
)
}
return nil
}
// Marshal serializes data to bytes
func (c *PatchableAzureCloudServiceConfig) Marshal() ([]byte, error) {
if c == nil {
return nil, errors.NewInternalf(errors.CodeInternal, "cloud service config is nil")
}
serialized, err := json.Marshal(c)
if err != nil {
return nil, errors.WrapInternalf(
err, errors.CodeInternal, "couldn't serialize cloud service config to JSON",
)
}
return serialized, nil
}
func (a *AzureCloudServiceConfig) Unmarshal(src []byte) error {
err := json.Unmarshal(src, a)
if err != nil {
return errors.WrapInternalf(
err, errors.CodeInternal, "couldn't deserialize cloud service config from JSON",
)
}
return nil
}
func (a *AzureCloudServiceConfig) Marshal() ([]byte, error) {
serialized, err := json.Marshal(a)
if err != nil {
return nil, errors.WrapInternalf(
err, errors.CodeInternal, "couldn't serialize cloud service config to JSON",
)
}
return serialized, nil
}
type GetServiceDetailsReq struct {
OrgID string
ServiceId string
CloudAccountID *string
}
// --------------------------------------------------------------------------
// DATABASE TYPES
// --------------------------------------------------------------------------
// --------------------------------------------------------------------------
// Cloud integration uses the cloud_integration table
// and the cloud_integration_service table
// --------------------------------------------------------------------------
type CloudIntegration struct {
bun.BaseModel `bun:"table:cloud_integration"`
types.Identifiable
types.TimeAuditable
Provider string `json:"provider" bun:"provider,type:text,unique:provider_id"`
Config []byte `json:"config" bun:"config,type:text"` // json serialized config
AccountID *string `json:"account_id" bun:"account_id,type:text"`
LastAgentReport *AgentReport `json:"last_agent_report" bun:"last_agent_report,type:text"`
RemovedAt *time.Time `json:"removed_at" bun:"removed_at,type:timestamp,nullzero"`
OrgID string `bun:"org_id,type:text,unique:provider_id"`
}
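// Status derives the account's integration status, surfacing the timestamp of
// the last agent heartbeat when an agent report has been recorded.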
func (a *CloudIntegration) Status() AccountStatus {
status := AccountStatus{}
if a.LastAgentReport != nil {
lastHeartbeat := a.LastAgentReport.TimestampMillis
status.Integration.LastHeartbeatTsMillis = &lastHeartbeat
}
return status
}
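// Account converts the stored integration row into the API-facing Account,
// decoding the provider-specific (AWS or Azure) config from the serialized
// Config bytes.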
func (a *CloudIntegration) Account(cloudProvider CloudProviderType) *Account {
ca := &Account{Id: a.ID.StringValue(), Status: a.Status()}
if a.AccountID != nil {
ca.CloudAccountId = *a.AccountID
}
ca.Config = map[string]interface{}{}
if len(a.Config) < 1 {
return ca
}
switch cloudProvider {
case CloudProviderAWS:
config := new(AWSAccountConfig)
_ = config.Unmarshal(a.Config)
ca.Config = config
case CloudProviderAzure:
config := new(AzureAccountConfig)
_ = config.Unmarshal(a.Config)
ca.Config = config
default:
}
return ca
}
type Account struct {
Id string `json:"id"`
CloudAccountId string `json:"cloud_account_id"`
Config any `json:"config"` // AWSAccountConfig or AzureAccountConfig
Status AccountStatus `json:"status"`
}
type AccountStatus struct {
Integration AccountIntegrationStatus `json:"integration"`
}
type AccountIntegrationStatus struct {
LastHeartbeatTsMillis *int64 `json:"last_heartbeat_ts_ms"`
}
func DefaultAWSAccountConfig() AWSAccountConfig {
return AWSAccountConfig{
EnabledRegions: []string{},
}
}
func DefaultAzureAccountConfig() AzureAccountConfig {
return AzureAccountConfig{
DeploymentRegion: "",
EnabledResourceGroups: []string{},
}
}
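// Illustrative sketch (not part of the original code): account configs
// round-trip through Marshal/Unmarshal as JSON bytes, which is how they are
// persisted on CloudIntegration.Config. The helper name is hypothetical.
func exampleAWSAccountConfigRoundTrip() (*AWSAccountConfig, error) {
cfg := DefaultAWSAccountConfig()
raw, err := cfg.Marshal()
if err != nil {
return nil, err
}
decoded := new(AWSAccountConfig)
if err := decoded.Unmarshal(raw); err != nil {
return nil, err
}
return decoded, nil
}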
type AWSServiceSummary struct {
DefinitionMetadata
Config *AWSCloudServiceConfig `json:"config"`
}
type AzureServiceSummary struct {
DefinitionMetadata
Config *AzureCloudServiceConfig `json:"config"`
}
type GettableAWSServiceDetails struct {
AWSServiceDefinition
Config *AWSCloudServiceConfig `json:"config"`
ConnectionStatus *ServiceConnectionStatus `json:"status,omitempty"`
}
type GettableAzureServiceDetails struct {
AzureServiceDefinition
Config *AzureCloudServiceConfig `json:"config"`
ConnectionStatus *ServiceConnectionStatus `json:"status,omitempty"`
}
type ServiceConnectionStatus struct {
Logs []*SignalConnectionStatus `json:"logs"`
Metrics []*SignalConnectionStatus `json:"metrics"`
}
type SignalConnectionStatus struct {
CategoryID string `json:"category"`
CategoryDisplayName string `json:"category_display_name"`
LastReceivedTsMillis int64 `json:"last_received_ts_ms"` // epoch milliseconds
LastReceivedFrom string `json:"last_received_from"` // resource identifier
}
//// AddAWSServiceStrategy is a helper for accumulating strategies for enabled services.
//func AddAWSServiceStrategy(serviceType string, cs *AWSCollectionStrategy,
// definitionStrat *AWSCollectionStrategy, config *AWSCloudServiceConfig) {
// if config.Logs != nil && config.Logs.Enabled {
// if serviceType == S3Sync {
// // S3 bucket sync; no CloudWatch logs are appended for this service type,
// // though the definition is populated with a custom CloudWatch group that helps in calculating the logs connection status.
// cs.S3Buckets = config.Logs.S3Buckets
// } else if definitionStrat.AWSLogs != nil { // services that include a logs subscription
// cs.AWSLogs.Subscriptions = append(
// cs.AWSLogs.Subscriptions,
// definitionStrat.AWSLogs.Subscriptions...,
// )
// }
// }
// if config.Metrics != nil && config.Metrics.Enabled && definitionStrat.AWSMetrics != nil {
// cs.AWSMetrics.StreamFilters = append(
// cs.AWSMetrics.StreamFilters,
// definitionStrat.AWSMetrics.StreamFilters...,
// )
// }
//}
type AgentReport struct {
TimestampMillis int64 `json:"timestamp_millis"`
Data map[string]any `json:"data"`
}
// Scan scans data from db
func (r *AgentReport) Scan(src any) error {
var data []byte
switch v := src.(type) {
case []byte:
data = v
case string:
data = []byte(v)
default:
return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
}
return json.Unmarshal(data, r)
}
// Value serializes data to bytes for db insertion
func (r *AgentReport) Value() (driver.Value, error) {
if r == nil {
return nil, errors.NewInternalf(errors.CodeInternal, "agent report is nil")
}
serialized, err := json.Marshal(r)
if err != nil {
return nil, errors.WrapInternalf(
err, errors.CodeInternal, "couldn't serialize agent report to JSON",
)
}
return serialized, nil
}
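// CloudIntegrationService stores the per-service configuration for a cloud
// integration account, keyed by service type and the owning integration id.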
type CloudIntegrationService struct {
bun.BaseModel `bun:"table:cloud_integration_service,alias:cis"`
types.Identifiable
types.TimeAuditable
Type string `bun:"type,type:text,notnull,unique:cloud_integration_id_type"`
Config []byte `bun:"config,type:text"` // json serialized config
CloudIntegrationID string `bun:"cloud_integration_id,type:text,notnull,unique:cloud_integration_id_type,references:cloud_integrations(id),on_delete:cascade"`
}
type AWSCloudServiceLogsConfig struct {
Enabled bool `json:"enabled"`
S3Buckets map[string][]string `json:"s3_buckets,omitempty"`
}
type AWSCloudServiceMetricsConfig struct {
Enabled bool `json:"enabled"`
}

View File

@@ -1,249 +0,0 @@
package integrationstypes
import (
"fmt"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
const (
S3Sync = "s3sync"
)
type DefinitionMetadata struct {
Id string `json:"id"`
Title string `json:"title"`
Icon string `json:"icon"`
}
type Definition interface {
GetId() string
Validate() error
PopulateDashboardURLs(svcId string)
}
var _ Definition = &AWSServiceDefinition{}
var _ Definition = &AzureServiceDefinition{}
type AWSServiceDefinition struct {
DefinitionMetadata
Overview string `json:"overview"` // markdown
Assets Assets `json:"assets"`
SupportedSignals SupportedSignals `json:"supported_signals"`
DataCollected DataCollected `json:"data_collected"`
Strategy *AWSCollectionStrategy `json:"telemetry_collection_strategy"`
IngestionStatusCheck *IngestionStatusCheck `json:"ingestion_status_check"`
}
type IngestionStatusCheck struct {
Metrics []*IngestionStatusCheckCategory `json:"metrics"`
Logs []*IngestionStatusCheckCategory `json:"logs"`
}
type IngestionStatusCheckCategory struct {
Category string `json:"category"`
DisplayName string `json:"display_name"`
Checks []*IngestionStatusCheckAttribute `json:"checks"`
}
type IngestionStatusCheckAttribute struct {
Key string `json:"key"` // search key (metric name or log message)
Attributes []*IngestionStatusCheckAttributeFilter `json:"attributes"`
}
type IngestionStatusCheckAttributeFilter struct {
Name string `json:"name"`
Operator string `json:"operator"`
Value string `json:"value"`
}
func (def *AWSServiceDefinition) GetId() string {
return def.Id
}
func (def *AWSServiceDefinition) Validate() error {
seenDashboardIds := map[string]interface{}{}
if def.Strategy == nil {
return errors.NewInternalf(errors.CodeInternal, "telemetry_collection_strategy is required")
}
for _, dd := range def.Assets.Dashboards {
if _, seen := seenDashboardIds[dd.Id]; seen {
return errors.NewInternalf(errors.CodeInternal, "multiple dashboards found with id %s for AWS Integration", dd.Id)
}
seenDashboardIds[dd.Id] = nil
}
return nil
}
func (def *AWSServiceDefinition) PopulateDashboardURLs(serviceId string) {
for i := range def.Assets.Dashboards {
dashboardId := def.Assets.Dashboards[i].Id
url := "/dashboard/" + GetCloudIntegrationDashboardID(CloudProviderAWS, serviceId, dashboardId)
def.Assets.Dashboards[i].Url = url
}
}
type AzureServiceDefinition struct {
DefinitionMetadata
Overview string `json:"overview"` // markdown
Assets Assets `json:"assets"`
SupportedSignals SupportedSignals `json:"supported_signals"`
DataCollected DataCollected `json:"data_collected"`
Strategy *AzureCollectionStrategy `json:"telemetry_collection_strategy"`
}
func (def *AzureServiceDefinition) PopulateDashboardURLs(svcId string) {
for i := range def.Assets.Dashboards {
dashboardId := def.Assets.Dashboards[i].Id
url := "/dashboard/" + GetCloudIntegrationDashboardID(CloudProviderAzure, svcId, dashboardId)
def.Assets.Dashboards[i].Url = url
}
}
func (def *AzureServiceDefinition) GetId() string {
return def.Id
}
func (def *AzureServiceDefinition) Validate() error {
seenDashboardIds := map[string]interface{}{}
if def.Strategy == nil {
return errors.NewInternalf(errors.CodeInternal, "telemetry_collection_strategy is required")
}
for _, dd := range def.Assets.Dashboards {
if _, seen := seenDashboardIds[dd.Id]; seen {
return errors.NewInternalf(errors.CodeInternal, "multiple dashboards found with id %s for Azure Integration", dd.Id)
}
seenDashboardIds[dd.Id] = nil
}
return nil
}
type Assets struct {
Dashboards []Dashboard `json:"dashboards"`
}
type SupportedSignals struct {
Logs bool `json:"logs"`
Metrics bool `json:"metrics"`
}
type DataCollected struct {
Logs []CollectedLogAttribute `json:"logs"`
Metrics []CollectedMetric `json:"metrics"`
}
type CollectedLogAttribute struct {
Name string `json:"name"`
Path string `json:"path"`
Type string `json:"type"`
}
type CollectedMetric struct {
Name string `json:"name"`
Type string `json:"type"`
Unit string `json:"unit"`
Description string `json:"description"`
}
type AWSCollectionStrategy struct {
Provider valuer.String `json:"provider"`
AWSMetrics *AWSMetricsStrategy `json:"aws_metrics,omitempty"`
AWSLogs *AWSLogsStrategy `json:"aws_logs,omitempty"`
S3Buckets map[string][]string `json:"s3_buckets,omitempty"` // Only available in S3 Sync Service Type
}
type AzureCollectionStrategy struct {
Provider valuer.String `json:"provider"`
AzureMetrics []*AzureMetricsStrategy `json:"azure_metrics"`
AzureLogs []*AzureLogsStrategy `json:"azure_logs"`
}
type AzureResourceGroup struct {
Name string `json:"name"`
Region string `json:"region"`
}
type AWSMetricsStrategy struct {
// To be used as CloudWatch metric stream IncludeFilters: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-metricstream.html#cfn-cloudwatch-metricstream-includefilters
StreamFilters []struct {
// json tags here are in the shape expected by AWS API as detailed at
// https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-metricstream-metricstreamfilter.html
Namespace string `json:"Namespace"`
MetricNames []string `json:"MetricNames,omitempty"`
} `json:"cloudwatch_metric_stream_filters"`
}
type AWSLogsStrategy struct {
Subscriptions []struct {
// Subscribe to all log groups with the specified prefix,
// e.g. `/aws/rds/`.
LogGroupNamePrefix string `json:"log_group_name_prefix"`
// https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/FilterAndPatternSyntax.html
// "" implies no filtering is required.
FilterPattern string `json:"filter_pattern"`
} `json:"cloudwatch_logs_subscriptions"`
}
type AzureMetricsStrategy struct {
CategoryType string `json:"category_type"`
Name string `json:"name"`
}
type AzureLogsStrategy struct {
CategoryType string `json:"category_type"`
Name string `json:"name"`
}
type Dashboard struct {
Id string `json:"id"`
Url string `json:"url"`
Title string `json:"title"`
Description string `json:"description"`
Image string `json:"image"`
Definition *dashboardtypes.StorableDashboardData `json:"definition,omitempty"`
}
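// GetCloudIntegrationDashboardID builds the dashboard id used for cloud
// integration dashboards, of the form
// "cloud-integration--<provider>--<serviceId>--<dashboardId>".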
func GetCloudIntegrationDashboardID(cloudProvider valuer.String, svcId, dashboardId string) string {
return fmt.Sprintf("cloud-integration--%s--%s--%s", cloudProvider, svcId, dashboardId)
}
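// GetDashboardsFromAssets converts the dashboards declared in a service's
// assets into locked dashboards attributed to the "<provider>-integration"
// user; callers are expected to supply non-nil definitions and createdAt.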
func GetDashboardsFromAssets(svcId string, cloudProvider CloudProviderType, createdAt *time.Time, assets Assets) []*dashboardtypes.Dashboard {
dashboards := make([]*dashboardtypes.Dashboard, 0)
for _, d := range assets.Dashboards {
author := fmt.Sprintf("%s-integration", cloudProvider)
dashboards = append(dashboards, &dashboardtypes.Dashboard{
ID: GetCloudIntegrationDashboardID(cloudProvider, svcId, d.Id),
Locked: true,
Data: *d.Definition,
TimeAuditable: types.TimeAuditable{
CreatedAt: *createdAt,
UpdatedAt: *createdAt,
},
UserAuditable: types.UserAuditable{
CreatedBy: author,
UpdatedBy: author,
},
})
}
return dashboards
}

View File

@@ -1,109 +0,0 @@
package integrationstypes
import (
"database/sql/driver"
"encoding/json"
"strings"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/uptrace/bun"
)
// CloudProviderType type alias
type CloudProviderType = valuer.String
var (
CloudProviderAWS = valuer.NewString("aws")
CloudProviderAzure = valuer.NewString("azure")
)
var (
CodeCloudProviderInvalidInput = errors.MustNewCode("invalid_cloud_provider")
)
func NewCloudProvider(provider string) (CloudProviderType, error) {
switch provider {
case CloudProviderAWS.String(), CloudProviderAzure.String():
return valuer.NewString(provider), nil
default:
return CloudProviderType{}, errors.NewInvalidInputf(CodeCloudProviderInvalidInput, "invalid cloud provider: %s", provider)
}
}
var (
AWSIntegrationUserEmail = valuer.MustNewEmail("aws-integration@signoz.io")
AzureIntegrationUserEmail = valuer.MustNewEmail("azure-integration@signoz.io")
)
var IntegrationUserEmails = []valuer.Email{
AWSIntegrationUserEmail,
AzureIntegrationUserEmail,
}
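// IsCloudIntegrationDashboardUuid reports whether dashboardUuid matches the
// "cloud-integration--<provider>--<service>--<dashboard>" format produced by
// GetCloudIntegrationDashboardID.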
func IsCloudIntegrationDashboardUuid(dashboardUuid string) bool {
parts := strings.SplitN(dashboardUuid, "--", 4)
if len(parts) != 4 {
return false
}
return parts[0] == "cloud-integration"
}
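// GetCloudProviderFromDashboardID extracts and validates the provider segment
// of a cloud integration dashboard id.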
func GetCloudProviderFromDashboardID(dashboardUuid string) (CloudProviderType, error) {
parts := strings.SplitN(dashboardUuid, "--", 4)
if len(parts) != 4 {
return valuer.String{}, errors.NewInvalidInputf(CodeCloudProviderInvalidInput, "invalid dashboard uuid: %s", dashboardUuid)
}
providerStr := parts[1]
cloudProvider, err := NewCloudProvider(providerStr)
if err != nil {
return CloudProviderType{}, err
}
return cloudProvider, nil
}
// --------------------------------------------------------------------------
// Normal integration uses just the installed_integration table
// --------------------------------------------------------------------------
type InstalledIntegration struct {
bun.BaseModel `bun:"table:installed_integration"`
types.Identifiable
Type string `json:"type" bun:"type,type:text,unique:org_id_type"`
Config InstalledIntegrationConfig `json:"config" bun:"config,type:text"`
InstalledAt time.Time `json:"installed_at" bun:"installed_at,default:current_timestamp"`
OrgID string `json:"org_id" bun:"org_id,type:text,unique:org_id_type,references:organizations(id),on_delete:cascade"`
}
type InstalledIntegrationConfig map[string]interface{}
// Scan scans data from db
func (c *InstalledIntegrationConfig) Scan(src interface{}) error {
var data []byte
switch v := src.(type) {
case []byte:
data = v
case string:
data = []byte(v)
default:
return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
}
return json.Unmarshal(data, c)
}
// Value serializes data to db
func (c *InstalledIntegrationConfig) Value() (driver.Value, error) {
serialized, err := json.Marshal(c)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "could not serialize integration config to JSON")
}
return serialized, nil
}