Compare commits

37 Commits

Author SHA1 Message Date
Swapnil Nakade  7b6752f11e  refactor: using QBv5 for connection status  2026-02-12 23:24:38 +05:30
Swapnil Nakade  d32911b0fd  Merge branch 'main' into refactor/aws-cloudintegration  2026-02-12 22:59:03 +05:30
Swapnil Nakade  22fcb7e9fb  feat: updating cloud integration aws api  2026-02-12 22:53:26 +05:30
Swapnil Nakade  e8d009d225  feat: adding logger in opts  2026-02-12 16:41:23 +05:30
Swapnil Nakade  25b143d21a  Merge branch 'main' into refactor/aws-cloudintegration  2026-02-12 16:35:41 +05:30
Swapnil Nakade  4487050375  refactor: improving aws cloud integration apis structure  2026-02-12 16:34:16 +05:30
Swapnil Nakade  f3732611ca  refactor: aws cloud integration provider impl  2026-02-12 15:08:55 +05:30
Swapnil Nakade  989ca522f8  refactor: updating azure telemetry collections strat  2026-02-12 14:54:24 +05:30
Swapnil Nakade  9a2e9d76b5  feat: updating service definitions  2026-02-10 20:11:40 +05:30
Swapnil Nakade  2be42deecd  Merge branch 'main' into feat/azure-integration  2026-02-10 18:44:40 +05:30
Swapnil Nakade  95cad880cc  feat: adding default values for azure service config  2026-02-10 18:21:26 +05:30
Swapnil Nakade  cfef1091b3  Merge branch 'main' into feat/azure-integration  2026-02-09 15:38:17 +05:30
Swapnil Nakade  4504c364f2  feat: adding comments  2026-02-09 15:37:10 +05:30
Swapnil Nakade  1a006870e1  refactor: removing wrong deps  2026-02-09 15:24:02 +05:30
Swapnil Nakade  e7a27a1cfb  Merge branch 'main' into feat/azure-integration  2026-02-09 15:19:36 +05:30
Swapnil Nakade  1e7323ead2  refactor: updating response for conn status  2026-02-08 22:56:04 +05:30
Swapnil Nakade  af4c6c5b52  Merge branch 'main' into feat/azure-integration  2026-02-08 18:24:47 +05:30
Swapnil Nakade  02262ba245  refactor: wip  2026-02-08 17:28:04 +05:30
Swapnil Nakade  df7c9e1339  chore: wip  2026-02-08 00:09:29 +05:30
Swapnil Nakade  ac5e52479f  refactor: removing comment  2026-02-06 17:48:49 +05:30
Swapnil Nakade  de56477bbb  Merge branch 'main' into feat/azure-integration  2026-02-06 17:45:40 +05:30
Swapnil Nakade  fddd8a27fa  fix: aws connection url generation  2026-02-06 17:44:06 +05:30
Swapnil Nakade  2aa4f8e237  feat: adding logs  2026-02-06 11:57:23 +05:30
Swapnil Nakade  74006a214b  ci: fixing lint ci issues  2026-02-06 04:01:39 +05:30
Swapnil Nakade  ed2cbacadc  Merge branch 'main' into feat/azure-integration  2026-02-06 03:55:33 +05:30
Swapnil Nakade  3cbd529843  refactor: reverting  2026-02-06 03:51:29 +05:30
Swapnil Nakade  78b481e895  refactor: cloud integration API cleanup  2026-02-06 03:42:25 +05:30
Swapnil Nakade  215098ec0d  refactor: sorting services list for consistency  2026-02-03 13:59:26 +05:30
Swapnil Nakade  5a4ef2e4ce  refactor: code beautification  2026-02-03 01:16:58 +05:30
Swapnil Nakade  b1f33c4f7f  refactor: updating service details api  2026-02-03 00:52:27 +05:30
Swapnil Nakade  713c84b1e4  refactor: updating cloud provider type  2026-02-03 00:10:18 +05:30
Swapnil Nakade  c3daf9e428  Merge branch 'main' into feat/azure-integration  2026-02-02 23:37:45 +05:30
Swapnil Nakade  70a908deb1  refactor: updating cloud-integration controller  2026-02-02 23:36:46 +05:30
Swapnil Nakade  cc9cdded3c  refactor: updating cloud integration apis  2026-02-02 21:50:15 +05:30
Swapnil Nakade  77067cd614  feat: extending cloud integration apis  2026-01-29 12:20:19 +05:30
Swapnil Nakade  ab703d9a65  Merge branch 'main' into feat/azure-integration  2026-01-28 18:28:16 +05:30
Swapnil Nakade  611e8fbf9e  refactor: updating azure cloud integrations api  2026-01-28 11:18:58 +05:30
85 changed files with 5687 additions and 9138 deletions

View File

@@ -1,6 +1,7 @@
package api
import (
"log/slog"
"net/http"
"net/http/httputil"
"time"
@@ -13,7 +14,6 @@ import (
"github.com/SigNoz/signoz/pkg/http/middleware"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
@@ -30,13 +30,13 @@ type APIHandlerOptions struct {
RulesManager *rules.Manager
UsageManager *usage.Manager
IntegrationsController *integrations.Controller
CloudIntegrationsController *cloudintegrations.Controller
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
Gateway *httputil.ReverseProxy
GatewayUrl string
// Querier Influx Interval
FluxInterval time.Duration
GlobalConfig global.Config
Logger *slog.Logger // this is present in Signoz.Instrumentation but adding for quick access
}
type APIHandler struct {
@@ -50,7 +50,6 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
Reader: opts.DataConnector,
RuleManager: opts.RulesManager,
IntegrationsController: opts.IntegrationsController,
CloudIntegrationsController: opts.CloudIntegrationsController,
LogsParsingPipelineController: opts.LogsParsingPipelineController,
FluxInterval: opts.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
@@ -58,6 +57,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
Logger: opts.Logger,
})
if err != nil {
@@ -118,14 +118,12 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
}
func (ah *APIHandler) RegisterCloudIntegrationsRoutes(router *mux.Router, am *middleware.AuthZ) {
ah.APIHandler.RegisterCloudIntegrationsRoutes(router, am)
router.HandleFunc(
"/api/v1/cloud-integrations/{cloudProvider}/accounts/generate-connection-params",
am.EditAccess(ah.CloudIntegrationsGenerateConnectionParams),
).Methods(http.MethodGet)
}
func (ah *APIHandler) getVersion(w http.ResponseWriter, r *http.Request) {

View File

@@ -6,6 +6,7 @@ import (
"encoding/json"
"fmt"
"io"
"log/slog"
"net/http"
"strings"
"time"
@@ -13,20 +14,14 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/user"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/gorilla/mux"
"go.uber.org/zap"
)
type CloudIntegrationConnectionParamsResponse struct {
IngestionUrl string `json:"ingestion_url,omitempty"`
IngestionKey string `json:"ingestion_key,omitempty"`
SigNozAPIUrl string `json:"signoz_api_url,omitempty"`
SigNozAPIKey string `json:"signoz_api_key,omitempty"`
}
// TODO: move this file with other cloud integration related code
func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseWriter, r *http.Request) {
claims, err := authtypes.ClaimsFromContext(r.Context())
@@ -41,23 +36,21 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
return
}
cloudProvider := mux.Vars(r)["cloudProvider"]
if cloudProvider != "aws" {
RespondError(w, basemodel.BadRequest(fmt.Errorf(
"cloud provider not supported: %s", cloudProvider,
)), nil)
cloudProviderString := mux.Vars(r)["cloudProvider"]
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
if err != nil {
render.Error(w, err)
return
}
apiKey, apiErr := ah.getOrCreateCloudIntegrationPAT(r.Context(), claims.OrgID, cloudProvider)
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't provision PAT for cloud integration:",
), nil)
apiKey, err := ah.getOrCreateCloudIntegrationPAT(r.Context(), claims.OrgID, cloudProvider)
if err != nil {
render.Error(w, err)
return
}
result := CloudIntegrationConnectionParamsResponse{
result := integrationstypes.GettableCloudIntegrationConnectionParams{
SigNozAPIKey: apiKey,
}
@@ -71,16 +64,17 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
// Return the API Key (PAT) even if the rest of the params can not be deduced.
// Params not returned from here will be requested from the user via form inputs.
// This enables gracefully degraded but working experience even for non-cloud deployments.
zap.L().Info("ingestion params and signoz api url can not be deduced since no license was found")
ah.Respond(w, result)
ah.opts.Logger.InfoContext(
r.Context(),
"ingestion params and signoz api url can not be deduced since no license was found",
)
render.Success(w, http.StatusOK, result)
return
}
signozApiUrl, apiErr := ah.getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't deduce ingestion url and signoz api url",
), nil)
signozApiUrl, err := ah.getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
if err != nil {
render.Error(w, err)
return
}
@@ -89,48 +83,41 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
gatewayUrl := ah.opts.GatewayUrl
if len(gatewayUrl) > 0 {
ingestionKey, apiErr := getOrCreateCloudProviderIngestionKey(
ingestionKeyString, err := ah.getOrCreateCloudProviderIngestionKey(
r.Context(), gatewayUrl, license.Key, cloudProvider,
)
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't get or create ingestion key",
), nil)
if err != nil {
render.Error(w, err)
return
}
result.IngestionKey = ingestionKey
result.IngestionKey = ingestionKeyString
} else {
zap.L().Info("ingestion key can't be deduced since no gateway url has been configured")
ah.opts.Logger.InfoContext(
r.Context(),
"ingestion key can't be deduced since no gateway url has been configured",
)
}
ah.Respond(w, result)
render.Success(w, http.StatusOK, result)
}
func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId string, cloudProvider string) (
string, *basemodel.ApiError,
) {
func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId string, cloudProvider valuer.String) (string, error) {
integrationPATName := fmt.Sprintf("%s integration", cloudProvider)
integrationUser, apiErr := ah.getOrCreateCloudIntegrationUser(ctx, orgId, cloudProvider)
if apiErr != nil {
return "", apiErr
integrationUser, err := ah.getOrCreateCloudIntegrationUser(ctx, orgId, cloudProvider)
if err != nil {
return "", err
}
orgIdUUID, err := valuer.NewUUID(orgId)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't parse orgId: %w", err,
))
return "", err
}
allPats, err := ah.Signoz.Modules.User.ListAPIKeys(ctx, orgIdUUID)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't list PATs: %w", err,
))
return "", err
}
for _, p := range allPats {
if p.UserID == integrationUser.ID && p.Name == integrationPATName {
@@ -138,9 +125,10 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
}
}
zap.L().Info(
ah.opts.Logger.InfoContext(
ctx,
"no PAT found for cloud integration, creating a new one",
zap.String("cloudProvider", cloudProvider),
slog.String("cloudProvider", cloudProvider.String()),
)
newPAT, err := types.NewStorableAPIKey(
@@ -150,68 +138,48 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
0,
)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't create cloud integration PAT: %w", err,
))
return "", err
}
err = ah.Signoz.Modules.User.CreateAPIKey(ctx, newPAT)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't create cloud integration PAT: %w", err,
))
return "", err
}
return newPAT.Token, nil
}
func (ah *APIHandler) getOrCreateCloudIntegrationUser(
ctx context.Context, orgId string, cloudProvider string,
) (*types.User, *basemodel.ApiError) {
cloudIntegrationUserName := fmt.Sprintf("%s-integration", cloudProvider)
// TODO: move this function out of handler and use proper module structure
func (ah *APIHandler) getOrCreateCloudIntegrationUser(ctx context.Context, orgId string, cloudProvider valuer.String) (*types.User, error) {
cloudIntegrationUserName := fmt.Sprintf("%s-integration", cloudProvider.String())
email := valuer.MustNewEmail(fmt.Sprintf("%s@signoz.io", cloudIntegrationUserName))
cloudIntegrationUser, err := types.NewUser(cloudIntegrationUserName, email, types.RoleViewer, valuer.MustNewUUID(orgId))
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration user: %w", err))
return nil, err
}
password := types.MustGenerateFactorPassword(cloudIntegrationUser.ID.StringValue())
cloudIntegrationUser, err = ah.Signoz.Modules.User.GetOrCreateUser(ctx, cloudIntegrationUser, user.WithFactorPassword(password))
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("couldn't look for integration user: %w", err))
return nil, err
}
return cloudIntegrationUser, nil
}
func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
string, *basemodel.ApiError,
) {
// TODO: remove this struct from here
type deploymentResponse struct {
Name string `json:"name"`
ClusterInfo struct {
Region struct {
DNS string `json:"dns"`
} `json:"region"`
} `json:"cluster"`
}
// TODO: move this function out of handler and use proper module structure
func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (string, error) {
respBytes, err := ah.Signoz.Zeus.GetDeployment(ctx, licenseKey)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't query for deployment info: error: %w", err,
))
return "", errors.WrapInternalf(err, errors.CodeInternal, "couldn't query for deployment info: error")
}
resp := new(deploymentResponse)
resp := new(integrationstypes.GettableDeployment)
err = json.Unmarshal(respBytes, resp)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't unmarshal deployment info response: error: %w", err,
))
return "", errors.WrapInternalf(err, errors.CodeInternal, "couldn't unmarshal deployment info response")
}
regionDns := resp.ClusterInfo.Region.DNS
@@ -219,9 +187,11 @@ func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licens
if len(regionDns) < 1 || len(deploymentName) < 1 {
// Fail early if actual response structure and expectation here ever diverge
return "", basemodel.InternalError(fmt.Errorf(
return "", errors.WrapInternalf(
err,
errors.CodeInternal,
"deployment info response not in expected shape. couldn't determine region dns and deployment name",
))
)
}
signozApiUrl := fmt.Sprintf("https://%s.%s", deploymentName, regionDns)
@@ -229,102 +199,85 @@ func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licens
return signozApiUrl, nil
}
type ingestionKey struct {
Name string `json:"name"`
Value string `json:"value"`
// other attributes from gateway response not included here since they are not being used.
}
type ingestionKeysSearchResponse struct {
Status string `json:"status"`
Data []ingestionKey `json:"data"`
Error string `json:"error"`
}
type createIngestionKeyResponse struct {
Status string `json:"status"`
Data ingestionKey `json:"data"`
Error string `json:"error"`
}
func getOrCreateCloudProviderIngestionKey(
ctx context.Context, gatewayUrl string, licenseKey string, cloudProvider string,
) (string, *basemodel.ApiError) {
func (ah *APIHandler) getOrCreateCloudProviderIngestionKey(
ctx context.Context, gatewayUrl string, licenseKey string, cloudProvider valuer.String,
) (string, error) {
cloudProviderKeyName := fmt.Sprintf("%s-integration", cloudProvider)
// see if the key already exists
searchResult, apiErr := requestGateway[ingestionKeysSearchResponse](
searchResult, err := requestGateway[integrationstypes.GettableIngestionKeysSearch](
ctx,
gatewayUrl,
licenseKey,
fmt.Sprintf("/v1/workspaces/me/keys/search?name=%s", cloudProviderKeyName),
nil,
ah.opts.Logger,
)
if apiErr != nil {
return "", basemodel.WrapApiError(
apiErr, "couldn't search for cloudprovider ingestion key",
)
if err != nil {
return "", err
}
if searchResult.Status != "success" {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't search for cloudprovider ingestion key: status: %s, error: %s",
return "", errors.NewInternalf(
errors.CodeInternal,
"couldn't search for cloud provider ingestion key: status: %s, error: %s",
searchResult.Status, searchResult.Error,
))
}
for _, k := range searchResult.Data {
if k.Name == cloudProviderKeyName {
if len(k.Value) < 1 {
// Fail early if actual response structure and expectation here ever diverge
return "", basemodel.InternalError(fmt.Errorf(
"ingestion keys search response not as expected",
))
}
return k.Value, nil
}
}
zap.L().Info(
"no existing ingestion key found for cloud integration, creating a new one",
zap.String("cloudProvider", cloudProvider),
)
createKeyResult, apiErr := requestGateway[createIngestionKeyResponse](
ctx, gatewayUrl, licenseKey, "/v1/workspaces/me/keys",
map[string]any{
"name": cloudProviderKeyName,
"tags": []string{"integration", cloudProvider},
},
)
if apiErr != nil {
return "", basemodel.WrapApiError(
apiErr, "couldn't create cloudprovider ingestion key",
)
}
for _, k := range searchResult.Data {
if k.Name != cloudProviderKeyName {
continue
}
if len(k.Value) < 1 {
// Fail early if actual response structure and expectation here ever diverge
return "", errors.NewInternalf(errors.CodeInternal, "ingestion keys search response not as expected")
}
return k.Value, nil
}
ah.opts.Logger.InfoContext(
ctx,
"no existing ingestion key found for cloud integration, creating a new one",
slog.String("cloudProvider", cloudProvider.String()),
)
createKeyResult, err := requestGateway[integrationstypes.GettableCreateIngestionKey](
ctx, gatewayUrl, licenseKey, "/v1/workspaces/me/keys",
map[string]any{
"name": cloudProviderKeyName,
"tags": []string{"integration", cloudProvider.String()},
},
ah.opts.Logger,
)
if err != nil {
return "", err
}
if createKeyResult.Status != "success" {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't create cloudprovider ingestion key: status: %s, error: %s",
return "", errors.NewInternalf(
errors.CodeInternal,
"couldn't create cloud provider ingestion key: status: %s, error: %s",
createKeyResult.Status, createKeyResult.Error,
))
)
}
ingestionKey := createKeyResult.Data.Value
if len(ingestionKey) < 1 {
ingestionKeyString := createKeyResult.Data.Value
if len(ingestionKeyString) < 1 {
// Fail early if actual response structure and expectation here ever diverge
return "", basemodel.InternalError(fmt.Errorf(
return "", errors.NewInternalf(errors.CodeInternal,
"ingestion key creation response not as expected",
))
)
}
return ingestionKey, nil
return ingestionKeyString, nil
}
func requestGateway[ResponseType any](
ctx context.Context, gatewayUrl string, licenseKey string, path string, payload any,
) (*ResponseType, *basemodel.ApiError) {
ctx context.Context, gatewayUrl, licenseKey, path string, payload any, logger *slog.Logger,
) (*ResponseType, error) {
baseUrl := strings.TrimSuffix(gatewayUrl, "/")
reqUrl := fmt.Sprintf("%s%s", baseUrl, path)
@@ -335,13 +288,12 @@ func requestGateway[ResponseType any](
"X-Consumer-Groups": "ns:default",
}
return requestAndParseResponse[ResponseType](ctx, reqUrl, headers, payload)
return requestAndParseResponse[ResponseType](ctx, reqUrl, headers, payload, logger)
}
func requestAndParseResponse[ResponseType any](
ctx context.Context, url string, headers map[string]string, payload any,
) (*ResponseType, *basemodel.ApiError) {
ctx context.Context, url string, headers map[string]string, payload any, logger *slog.Logger,
) (*ResponseType, error) {
reqMethod := http.MethodGet
var reqBody io.Reader
if payload != nil {
@@ -349,18 +301,14 @@ func requestAndParseResponse[ResponseType any](
bodyJson, err := json.Marshal(payload)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf(
"couldn't serialize request payload to JSON: %w", err,
))
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't marshal payload")
}
reqBody = bytes.NewBuffer([]byte(bodyJson))
reqBody = bytes.NewBuffer(bodyJson)
}
req, err := http.NewRequestWithContext(ctx, reqMethod, url, reqBody)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf(
"couldn't prepare request: %w", err,
))
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't create req")
}
for k, v := range headers {
@@ -373,23 +321,26 @@ func requestAndParseResponse[ResponseType any](
response, err := client.Do(req)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("couldn't make request: %w", err))
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't make req")
}
defer response.Body.Close()
defer func() {
err = response.Body.Close()
if err != nil {
logger.ErrorContext(ctx, "couldn't close response body", "error", err)
}
}()
respBody, err := io.ReadAll(response.Body)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("couldn't read response: %w", err))
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't read response body")
}
var resp ResponseType
err = json.Unmarshal(respBody, &resp)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf(
"couldn't unmarshal gateway response into %T", resp,
))
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't unmarshal response body")
}
return &resp, nil

View File

@@ -38,7 +38,6 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
"github.com/SigNoz/signoz/pkg/query-service/app/opamp"
@@ -127,13 +126,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
)
}
cloudIntegrationsController, err := cloudintegrations.NewController(signoz.SQLStore)
if err != nil {
return nil, fmt.Errorf(
"couldn't create cloud provider integrations controller: %w", err,
)
}
// ingestion pipelines manager
logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(
signoz.SQLStore,
@@ -167,12 +159,12 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
RulesManager: rm,
UsageManager: usageManager,
IntegrationsController: integrationsController,
CloudIntegrationsController: cloudIntegrationsController,
LogsParsingPipelineController: logParsingPipelineController,
FluxInterval: config.Querier.FluxInterval,
Gateway: gatewayProxy,
GatewayUrl: config.Gateway.URL.String(),
GlobalConfig: config.Global,
Logger: signoz.Instrumentation.Logger(),
}
apiHandler, err := api.NewAPIHandler(apiOpts, signoz)

View File

@@ -12,8 +12,6 @@ export interface MockUPlotInstance {
export interface MockUPlotPaths {
spline: jest.Mock;
bars: jest.Mock;
linear: jest.Mock;
stepped: jest.Mock;
}
// Create mock instance methods
@@ -25,23 +23,10 @@ const createMockUPlotInstance = (): MockUPlotInstance => ({
setSeries: jest.fn(),
});
// Path builder: (self, seriesIdx, idx0, idx1) => paths or null
const createMockPathBuilder = (name: string): jest.Mock =>
jest.fn(() => ({
name, // To test if the correct pathBuilder is used
stroke: jest.fn(),
fill: jest.fn(),
clip: jest.fn(),
}));
// Create mock paths - linear, spline, stepped needed by UPlotSeriesBuilder.getPathBuilder
const mockPaths = {
spline: jest.fn(() => createMockPathBuilder('spline')),
bars: jest.fn(() => createMockPathBuilder('bars')),
linear: jest.fn(() => createMockPathBuilder('linear')),
stepped: jest.fn((opts?: { align?: number }) =>
createMockPathBuilder(`stepped-(${opts?.align ?? 0})`),
),
// Create mock paths
const mockPaths: MockUPlotPaths = {
spline: jest.fn(),
bars: jest.fn(),
};
// Mock static methods

View File

@@ -11,7 +11,6 @@ import {
const dashboardVariablesQuery = async (
props: Props,
signal?: AbortSignal,
): Promise<SuccessResponse<VariableResponseProps> | ErrorResponse> => {
try {
const { globalTime } = store.getState();
@@ -33,7 +32,7 @@ const dashboardVariablesQuery = async (
payload.variables = { ...payload.variables, ...timeVariables };
const response = await axios.post(`/variables/query`, payload, { signal });
const response = await axios.post(`/variables/query`, payload);
return {
statusCode: 200,

View File

@@ -19,7 +19,6 @@ export const getFieldValues = async (
startUnixMilli?: number,
endUnixMilli?: number,
existingQuery?: string,
abortSignal?: AbortSignal,
): Promise<SuccessResponseV2<FieldValueResponse>> => {
const params: Record<string, string> = {};
@@ -48,10 +47,7 @@ export const getFieldValues = async (
}
try {
const response = await axios.get('/fields/values', {
params,
signal: abortSignal,
});
const response = await axios.get('/fields/values', { params });
// Normalize values from different types (stringValues, boolValues, etc.)
if (response.data?.data?.values) {

View File

@@ -73,7 +73,6 @@ const CustomMultiSelect: React.FC<CustomMultiSelectProps> = ({
enableRegexOption = false,
isDynamicVariable = false,
showRetryButton = true,
waitingMessage,
...rest
}) => {
// ===== State & Refs =====
@@ -1682,7 +1681,6 @@ const CustomMultiSelect: React.FC<CustomMultiSelectProps> = ({
{!loading &&
!errorMessage &&
!noDataMessage &&
!waitingMessage &&
!(showIncompleteDataMessage && isScrolledToBottom) && (
<section className="navigate">
<ArrowDown size={8} className="icons" />
@@ -1700,17 +1698,7 @@ const CustomMultiSelect: React.FC<CustomMultiSelectProps> = ({
<div className="navigation-text">Refreshing values...</div>
</div>
)}
{!loading && waitingMessage && (
<div className="navigation-loading">
<div className="navigation-icons">
<LoadingOutlined />
</div>
<div className="navigation-text" title={waitingMessage}>
{waitingMessage}
</div>
</div>
)}
{errorMessage && !loading && !waitingMessage && (
{errorMessage && !loading && (
<div className="navigation-error">
<div className="navigation-text">
{errorMessage || SOMETHING_WENT_WRONG}
@@ -1732,7 +1720,6 @@ const CustomMultiSelect: React.FC<CustomMultiSelectProps> = ({
{showIncompleteDataMessage &&
isScrolledToBottom &&
!loading &&
!waitingMessage &&
!errorMessage && (
<div className="navigation-text-incomplete">
Don&apos;t see the value? Use search
@@ -1775,7 +1762,6 @@ const CustomMultiSelect: React.FC<CustomMultiSelectProps> = ({
isDarkMode,
isDynamicVariable,
showRetryButton,
waitingMessage,
]);
// Custom handler for dropdown visibility changes

View File

@@ -63,7 +63,6 @@ const CustomSelect: React.FC<CustomSelectProps> = ({
showIncompleteDataMessage = false,
showRetryButton = true,
isDynamicVariable = false,
waitingMessage,
...rest
}) => {
// ===== State & Refs =====
@@ -569,7 +568,6 @@ const CustomSelect: React.FC<CustomSelectProps> = ({
{!loading &&
!errorMessage &&
!noDataMessage &&
!waitingMessage &&
!(showIncompleteDataMessage && isScrolledToBottom) && (
<section className="navigate">
<ArrowDown size={8} className="icons" />
@@ -585,16 +583,6 @@ const CustomSelect: React.FC<CustomSelectProps> = ({
<div className="navigation-text">Refreshing values...</div>
</div>
)}
{!loading && waitingMessage && (
<div className="navigation-loading">
<div className="navigation-icons">
<LoadingOutlined />
</div>
<div className="navigation-text" title={waitingMessage}>
{waitingMessage}
</div>
</div>
)}
{errorMessage && !loading && (
<div className="navigation-error">
<div className="navigation-text">
@@ -617,7 +605,6 @@ const CustomSelect: React.FC<CustomSelectProps> = ({
{showIncompleteDataMessage &&
isScrolledToBottom &&
!loading &&
!waitingMessage &&
!errorMessage && (
<div className="navigation-text-incomplete">
Don&apos;t see the value? Use search
@@ -654,7 +641,6 @@ const CustomSelect: React.FC<CustomSelectProps> = ({
showRetryButton,
isDarkMode,
isDynamicVariable,
waitingMessage,
]);
// Handle dropdown visibility changes

View File

@@ -30,7 +30,6 @@ export interface CustomSelectProps extends Omit<SelectProps, 'options'> {
showIncompleteDataMessage?: boolean;
showRetryButton?: boolean;
isDynamicVariable?: boolean;
waitingMessage?: string;
}
export interface CustomTagProps {
@@ -67,5 +66,4 @@ export interface CustomMultiSelectProps
enableRegexOption?: boolean;
isDynamicVariable?: boolean;
showRetryButton?: boolean;
waitingMessage?: string;
}

View File

@@ -83,7 +83,7 @@ export const prepareUPlotConfig = ({
drawStyle: DrawStyle.Line,
label: label,
colorMapping: widget.customLegendColors ?? {},
spanGaps: true,
spanGaps: false,
lineStyle: LineStyle.Solid,
lineInterpolation: LineInterpolation.Spline,
showPoints: VisibilityMode.Never,

View File

@@ -14,11 +14,6 @@ export interface GraphVisibilityState {
dataIndex: SeriesVisibilityItem[];
}
export interface SeriesVisibilityState {
labels: string[];
visibility: boolean[];
}
/**
* Context in which a panel is rendered. Used to vary behavior (e.g. persistence,
* interactions) per context.

View File

@@ -1,271 +0,0 @@
import { LOCALSTORAGE } from 'constants/localStorage';
import type { GraphVisibilityState } from '../../types';
import {
getStoredSeriesVisibility,
updateSeriesVisibilityToLocalStorage,
} from '../legendVisibilityUtils';
describe('legendVisibilityUtils', () => {
const storageKey = LOCALSTORAGE.GRAPH_VISIBILITY_STATES;
beforeEach(() => {
localStorage.clear();
jest.spyOn(window.localStorage.__proto__, 'getItem');
jest.spyOn(window.localStorage.__proto__, 'setItem');
});
afterEach(() => {
jest.restoreAllMocks();
});
describe('getStoredSeriesVisibility', () => {
it('returns null when there is no stored visibility state', () => {
const result = getStoredSeriesVisibility('widget-1');
expect(result).toBeNull();
expect(localStorage.getItem).toHaveBeenCalledWith(storageKey);
});
it('returns null when widget has no stored dataIndex', () => {
const stored: GraphVisibilityState[] = [
{
name: 'widget-1',
dataIndex: [],
},
];
localStorage.setItem(storageKey, JSON.stringify(stored));
const result = getStoredSeriesVisibility('widget-1');
expect(result).toBeNull();
});
it('returns visibility array by index when widget state exists', () => {
const stored: GraphVisibilityState[] = [
{
name: 'widget-1',
dataIndex: [
{ label: 'CPU', show: true },
{ label: 'Memory', show: false },
],
},
{
name: 'widget-2',
dataIndex: [{ label: 'Errors', show: true }],
},
];
localStorage.setItem(storageKey, JSON.stringify(stored));
const result = getStoredSeriesVisibility('widget-1');
expect(result).not.toBeNull();
expect(result).toEqual({
labels: ['CPU', 'Memory'],
visibility: [true, false],
});
});
it('returns visibility by index including duplicate labels', () => {
const stored: GraphVisibilityState[] = [
{
name: 'widget-1',
dataIndex: [
{ label: 'CPU', show: true },
{ label: 'CPU', show: false },
{ label: 'Memory', show: false },
],
},
];
localStorage.setItem(storageKey, JSON.stringify(stored));
const result = getStoredSeriesVisibility('widget-1');
expect(result).not.toBeNull();
expect(result).toEqual({
labels: ['CPU', 'CPU', 'Memory'],
visibility: [true, false, false],
});
});
it('returns null on malformed JSON in localStorage', () => {
localStorage.setItem(storageKey, '{invalid-json');
const result = getStoredSeriesVisibility('widget-1');
expect(result).toBeNull();
});
it('returns null when widget id is not found', () => {
const stored: GraphVisibilityState[] = [
{
name: 'another-widget',
dataIndex: [{ label: 'CPU', show: true }],
},
];
localStorage.setItem(storageKey, JSON.stringify(stored));
const result = getStoredSeriesVisibility('widget-1');
expect(result).toBeNull();
});
});
describe('updateSeriesVisibilityToLocalStorage', () => {
it('creates new visibility state when none exists', () => {
const seriesVisibility = [
{ label: 'CPU', show: true },
{ label: 'Memory', show: false },
];
updateSeriesVisibilityToLocalStorage('widget-1', seriesVisibility);
const stored = getStoredSeriesVisibility('widget-1');
expect(stored).not.toBeNull();
expect(stored).toEqual({
labels: ['CPU', 'Memory'],
visibility: [true, false],
});
});
it('adds a new widget entry when other widgets already exist', () => {
const existing: GraphVisibilityState[] = [
{
name: 'widget-existing',
dataIndex: [{ label: 'Errors', show: true }],
},
];
localStorage.setItem(storageKey, JSON.stringify(existing));
const newVisibility = [{ label: 'CPU', show: false }];
updateSeriesVisibilityToLocalStorage('widget-new', newVisibility);
const stored = getStoredSeriesVisibility('widget-new');
expect(stored).not.toBeNull();
expect(stored).toEqual({ labels: ['CPU'], visibility: [false] });
});
it('updates existing widget visibility when entry already exists', () => {
const initialVisibility: GraphVisibilityState[] = [
{
name: 'widget-1',
dataIndex: [
{ label: 'CPU', show: true },
{ label: 'Memory', show: true },
],
},
];
localStorage.setItem(storageKey, JSON.stringify(initialVisibility));
const updatedVisibility = [
{ label: 'CPU', show: false },
{ label: 'Memory', show: true },
];
updateSeriesVisibilityToLocalStorage('widget-1', updatedVisibility);
const stored = getStoredSeriesVisibility('widget-1');
expect(stored).not.toBeNull();
expect(stored).toEqual({
labels: ['CPU', 'Memory'],
visibility: [false, true],
});
});
it('silently handles malformed existing JSON without throwing', () => {
localStorage.setItem(storageKey, '{invalid-json');
expect(() =>
updateSeriesVisibilityToLocalStorage('widget-1', [
{ label: 'CPU', show: true },
]),
).not.toThrow();
});
it('when existing JSON is malformed, overwrites with valid data for the widget', () => {
localStorage.setItem(storageKey, '{invalid-json');
updateSeriesVisibilityToLocalStorage('widget-1', [
{ label: 'x-axis', show: true },
{ label: 'CPU', show: false },
]);
const stored = getStoredSeriesVisibility('widget-1');
expect(stored).not.toBeNull();
expect(stored).toEqual({
labels: ['x-axis', 'CPU'],
visibility: [true, false],
});
const expected = [
{
name: 'widget-1',
dataIndex: [
{ label: 'x-axis', show: true },
{ label: 'CPU', show: false },
],
},
];
expect(localStorage.setItem).toHaveBeenCalledWith(
storageKey,
JSON.stringify(expected),
);
});
it('preserves other widgets when updating one widget', () => {
const existing: GraphVisibilityState[] = [
{ name: 'widget-a', dataIndex: [{ label: 'A', show: true }] },
{ name: 'widget-b', dataIndex: [{ label: 'B', show: false }] },
];
localStorage.setItem(storageKey, JSON.stringify(existing));
updateSeriesVisibilityToLocalStorage('widget-b', [
{ label: 'B', show: true },
]);
expect(getStoredSeriesVisibility('widget-a')).toEqual({
labels: ['A'],
visibility: [true],
});
expect(getStoredSeriesVisibility('widget-b')).toEqual({
labels: ['B'],
visibility: [true],
});
});
it('calls setItem with storage key and stringified visibility states', () => {
updateSeriesVisibilityToLocalStorage('widget-1', [
{ label: 'CPU', show: true },
]);
expect(localStorage.setItem).toHaveBeenCalledTimes(1);
expect(localStorage.setItem).toHaveBeenCalledWith(
storageKey,
expect.any(String),
);
const [_, value] = (localStorage.setItem as jest.Mock).mock.calls[0];
expect((): void => JSON.parse(value)).not.toThrow();
expect(JSON.parse(value)).toEqual([
{ name: 'widget-1', dataIndex: [{ label: 'CPU', show: true }] },
]);
});
it('stores empty dataIndex when seriesVisibility is empty', () => {
updateSeriesVisibilityToLocalStorage('widget-1', []);
const raw = localStorage.getItem(storageKey);
expect(raw).not.toBeNull();
const parsed = JSON.parse(raw ?? '[]');
expect(parsed).toEqual([{ name: 'widget-1', dataIndex: [] }]);
expect(getStoredSeriesVisibility('widget-1')).toBeNull();
});
});
});

View File

@@ -88,7 +88,7 @@ export function buildBaseConfig({
max: undefined,
softMin: widget.softMin ?? undefined,
softMax: widget.softMax ?? undefined,
thresholds: thresholdOptions,
// thresholds,
logBase: widget.isLogScale ? 10 : undefined,
distribution: widget.isLogScale
? DistributionType.Logarithmic

View File

@@ -1,20 +1,15 @@
import { LOCALSTORAGE } from 'constants/localStorage';
import {
GraphVisibilityState,
SeriesVisibilityItem,
SeriesVisibilityState,
} from '../types';
import { GraphVisibilityState, SeriesVisibilityItem } from '../types';
/**
* Retrieves the stored series visibility for a specific widget from localStorage by index.
* Index 0 is the x-axis (time); indices 1, 2, ... are data series (same order as uPlot plot.series).
* Retrieves the visibility map for a specific widget from localStorage
* @param widgetId - The unique identifier of the widget
* @returns visibility[i] = show state for series at index i, or null if not found
* @returns A Map of series labels to their visibility state, or null if not found
*/
export function getStoredSeriesVisibility(
widgetId: string,
): SeriesVisibilityState | null {
): Map<string, boolean> | null {
try {
const storedData = localStorage.getItem(LOCALSTORAGE.GRAPH_VISIBILITY_STATES);
@@ -29,15 +24,8 @@ export function getStoredSeriesVisibility(
return null;
}
return {
labels: widgetState.dataIndex.map((item) => item.label),
visibility: widgetState.dataIndex.map((item) => item.show),
};
} catch (error) {
if (error instanceof SyntaxError) {
// If the stored data is malformed, remove it
localStorage.removeItem(LOCALSTORAGE.GRAPH_VISIBILITY_STATES);
}
return new Map(widgetState.dataIndex.map((item) => [item.label, item.show]));
} catch {
// Silently handle parsing errors - fall back to default visibility
return null;
}
@@ -47,31 +35,40 @@ export function updateSeriesVisibilityToLocalStorage(
widgetId: string,
seriesVisibility: SeriesVisibilityItem[],
): void {
let visibilityStates: GraphVisibilityState[] = [];
try {
const storedData = localStorage.getItem(LOCALSTORAGE.GRAPH_VISIBILITY_STATES);
visibilityStates = JSON.parse(storedData || '[]');
} catch (error) {
if (error instanceof SyntaxError) {
visibilityStates = [];
let visibilityStates: GraphVisibilityState[];
if (!storedData) {
visibilityStates = [
{
name: widgetId,
dataIndex: seriesVisibility,
},
];
} else {
visibilityStates = JSON.parse(storedData);
}
}
const widgetState = visibilityStates.find((state) => state.name === widgetId);
const widgetState = visibilityStates.find((state) => state.name === widgetId);
if (widgetState) {
widgetState.dataIndex = seriesVisibility;
} else {
visibilityStates = [
...visibilityStates,
{
name: widgetId,
dataIndex: seriesVisibility,
},
];
}
if (!widgetState) {
visibilityStates = [
...visibilityStates,
{
name: widgetId,
dataIndex: seriesVisibility,
},
];
} else {
widgetState.dataIndex = seriesVisibility;
}
localStorage.setItem(
LOCALSTORAGE.GRAPH_VISIBILITY_STATES,
JSON.stringify(visibilityStates),
);
localStorage.setItem(
LOCALSTORAGE.GRAPH_VISIBILITY_STATES,
JSON.stringify(visibilityStates),
);
} catch {
// Silently handle parsing errors - fall back to default visibility
}
}

View File

@@ -1,6 +1,5 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import TimeSeriesPanel from '../DashboardContainer/visualization/panels/TimeSeriesPanel/TimeSeriesPanel';
import HistogramPanelWrapper from './HistogramPanelWrapper';
import ListPanelWrapper from './ListPanelWrapper';
import PiePanelWrapper from './PiePanelWrapper';
@@ -9,7 +8,7 @@ import UplotPanelWrapper from './UplotPanelWrapper';
import ValuePanelWrapper from './ValuePanelWrapper';
export const PanelTypeVsPanelWrapper = {
[PANEL_TYPES.TIME_SERIES]: TimeSeriesPanel,
[PANEL_TYPES.TIME_SERIES]: UplotPanelWrapper,
[PANEL_TYPES.TABLE]: TablePanelWrapper,
[PANEL_TYPES.LIST]: ListPanelWrapper,
[PANEL_TYPES.VALUE]: ValuePanelWrapper,

View File

@@ -1,61 +0,0 @@
import { useCallback, useEffect, useRef, useState } from 'react';
const DEFAULT_COPIED_RESET_MS = 2000;
export interface UseCopyToClipboardOptions {
/** How long (ms) to keep "copied" state before resetting. Default 2000. */
copiedResetMs?: number;
}
export type ID = number | string | null;
export interface UseCopyToClipboardReturn {
/** Copy text to clipboard. Pass an optional id to track which item was copied (e.g. seriesIndex). */
copyToClipboard: (text: string, id?: ID) => void;
/** True when something was just copied and still within the reset threshold. */
isCopied: boolean;
/** The id passed to the last successful copy, or null after reset. Use to show "copied" state for a specific item (e.g. copiedId === item.seriesIndex). */
id: ID;
}
export function useCopyToClipboard(
options: UseCopyToClipboardOptions = {},
): UseCopyToClipboardReturn {
const { copiedResetMs = DEFAULT_COPIED_RESET_MS } = options;
const [state, setState] = useState<{ isCopied: boolean; id: ID }>({
isCopied: false,
id: null,
});
const timeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null);
useEffect(() => {
return (): void => {
if (timeoutRef.current) {
clearTimeout(timeoutRef.current);
timeoutRef.current = null;
}
};
}, []);
const copyToClipboard = useCallback(
(text: string, id?: ID): void => {
navigator.clipboard.writeText(text).then(() => {
if (timeoutRef.current) {
clearTimeout(timeoutRef.current);
}
setState({ isCopied: true, id: id ?? null });
timeoutRef.current = setTimeout(() => {
setState({ isCopied: false, id: null });
timeoutRef.current = null;
}, copiedResetMs);
});
},
[copiedResetMs],
);
return {
copyToClipboard,
isCopied: state.isCopied,
id: state.id,
};
}

View File

@@ -1,445 +0,0 @@
import { IBuilderQuery, Query } from 'types/api/queryBuilder/queryBuilderData';
import { EQueryType } from 'types/common/dashboard';
import {
buildVariableReferencePattern,
extractQueryTextStrings,
getVariableReferencesInQuery,
textContainsVariableReference,
} from './variableReference';
describe('buildVariableReferencePattern', () => {
const varName = 'deployment_environment';
it.each([
['{{.deployment_environment}}', '{{.var}} syntax'],
['{{ .deployment_environment }}', '{{.var}} with spaces'],
['{{deployment_environment}}', '{{var}} syntax'],
['{{ deployment_environment }}', '{{var}} with spaces'],
['$deployment_environment', '$var syntax'],
['[[deployment_environment]]', '[[var]] syntax'],
['[[ deployment_environment ]]', '[[var]] with spaces'],
])('matches %s (%s)', (text) => {
expect(buildVariableReferencePattern(varName).test(text)).toBe(true);
});
it('does not match partial variable names', () => {
const pattern = buildVariableReferencePattern('env');
// $env should match at word boundary, but $environment should not match $env
expect(pattern.test('$environment')).toBe(false);
});
it('matches $var at word boundary within larger text', () => {
const pattern = buildVariableReferencePattern('env');
expect(pattern.test('SELECT * WHERE x = $env')).toBe(true);
expect(pattern.test('$env AND y = 1')).toBe(true);
});
});
describe('textContainsVariableReference', () => {
describe('guard clauses', () => {
it('returns false for empty text', () => {
expect(textContainsVariableReference('', 'var')).toBe(false);
});
it('returns false for empty variable name', () => {
expect(textContainsVariableReference('some text', '')).toBe(false);
});
});
describe('all syntax formats', () => {
const varName = 'service_name';
it('detects {{.var}} format', () => {
const query = "SELECT * FROM table WHERE service = '{{.service_name}}'";
expect(textContainsVariableReference(query, varName)).toBe(true);
});
it('detects {{var}} format', () => {
const query = "SELECT * FROM table WHERE service = '{{service_name}}'";
expect(textContainsVariableReference(query, varName)).toBe(true);
});
it('detects $var format', () => {
const query = "SELECT * FROM table WHERE service = '$service_name'";
expect(textContainsVariableReference(query, varName)).toBe(true);
});
it('detects [[var]] format', () => {
const query = "SELECT * FROM table WHERE service = '[[service_name]]'";
expect(textContainsVariableReference(query, varName)).toBe(true);
});
});
describe('embedded in larger text', () => {
it('finds variable in a multi-line query', () => {
const query = `SELECT JSONExtractString(labels, 'k8s_node_name') AS k8s_node_name
FROM signoz_metrics.distributed_time_series_v4_1day
WHERE metric_name = 'k8s_node_cpu_time' AND JSONExtractString(labels, 'k8s_cluster_name') = {{.k8s_cluster_name}}
GROUP BY k8s_node_name`;
expect(textContainsVariableReference(query, 'k8s_cluster_name')).toBe(true);
expect(textContainsVariableReference(query, 'k8s_node_name')).toBe(false); // plain text, not a variable reference
});
});
describe('no false positives', () => {
it('does not match substring of a longer variable name', () => {
expect(
textContainsVariableReference('$service_name_v2', 'service_name'),
).toBe(false);
});
it('does not match plain text that happens to contain the name', () => {
expect(
textContainsVariableReference(
'the service_name column is important',
'service_name',
),
).toBe(false);
});
});
});
// ---- Query text extraction & variable reference detection ----
const baseQuery: Query = {
id: 'test-query',
queryType: EQueryType.QUERY_BUILDER,
promql: [],
builder: { queryData: [], queryFormulas: [], queryTraceOperator: [] },
clickhouse_sql: [],
};
describe('extractQueryTextStrings', () => {
it('returns empty array for query builder with no data', () => {
expect(extractQueryTextStrings(baseQuery)).toEqual([]);
});
it('extracts string values from query builder filter items', () => {
const query: Query = {
...baseQuery,
queryType: EQueryType.QUERY_BUILDER,
builder: {
queryData: [
({
filters: {
items: [
{ id: '1', op: '=', value: ['$service_name', 'hardcoded'] },
{ id: '2', op: '=', value: '$env' },
],
op: 'AND',
},
} as unknown) as IBuilderQuery,
],
queryFormulas: [],
queryTraceOperator: [],
},
};
const texts = extractQueryTextStrings(query);
expect(texts).toEqual(['$service_name', 'hardcoded', '$env']);
});
it('extracts filter expression from query builder', () => {
const query: Query = {
...baseQuery,
queryType: EQueryType.QUERY_BUILDER,
builder: {
queryData: [
({
filters: { items: [], op: 'AND' },
filter: { expression: 'env = $deployment_environment' },
} as unknown) as IBuilderQuery,
],
queryFormulas: [],
queryTraceOperator: [],
},
};
const texts = extractQueryTextStrings(query);
expect(texts).toEqual(['env = $deployment_environment']);
});
it('skips non-string filter values', () => {
const query: Query = {
...baseQuery,
queryType: EQueryType.QUERY_BUILDER,
builder: {
queryData: [
({
filters: {
items: [{ id: '1', op: '=', value: [42, true] }],
op: 'AND',
},
} as unknown) as IBuilderQuery,
],
queryFormulas: [],
queryTraceOperator: [],
},
};
expect(extractQueryTextStrings(query)).toEqual([]);
});
it('extracts promql query strings', () => {
const query: Query = {
...baseQuery,
queryType: EQueryType.PROM,
promql: [
{ name: 'A', query: 'up{env="$env"}', legend: '', disabled: false },
{ name: 'B', query: 'cpu{ns="$namespace"}', legend: '', disabled: false },
],
};
expect(extractQueryTextStrings(query)).toEqual([
'up{env="$env"}',
'cpu{ns="$namespace"}',
]);
});
it('extracts clickhouse sql query strings', () => {
const query: Query = {
...baseQuery,
queryType: EQueryType.CLICKHOUSE,
clickhouse_sql: [
{
name: 'A',
query: 'SELECT * WHERE env = {{.env}}',
legend: '',
disabled: false,
},
],
};
expect(extractQueryTextStrings(query)).toEqual([
'SELECT * WHERE env = {{.env}}',
]);
});
it('accumulates texts across multiple queryData entries', () => {
const query: Query = {
...baseQuery,
queryType: EQueryType.QUERY_BUILDER,
builder: {
queryData: [
({
filters: {
items: [{ id: '1', op: '=', value: '$env' }],
op: 'AND',
},
} as unknown) as IBuilderQuery,
({
filters: {
items: [{ id: '2', op: '=', value: ['$service_name'] }],
op: 'AND',
},
} as unknown) as IBuilderQuery,
],
queryFormulas: [],
queryTraceOperator: [],
},
};
expect(extractQueryTextStrings(query)).toEqual(['$env', '$service_name']);
});
it('collects both filter items and filter expression from the same queryData', () => {
const query: Query = {
...baseQuery,
queryType: EQueryType.QUERY_BUILDER,
builder: {
queryData: [
({
filters: {
items: [{ id: '1', op: '=', value: '$service_name' }],
op: 'AND',
},
filter: { expression: 'env = $deployment_environment' },
} as unknown) as IBuilderQuery,
],
queryFormulas: [],
queryTraceOperator: [],
},
};
expect(extractQueryTextStrings(query)).toEqual([
'$service_name',
'env = $deployment_environment',
]);
});
it('skips promql entries with empty query strings', () => {
const query: Query = {
...baseQuery,
queryType: EQueryType.PROM,
promql: [
{ name: 'A', query: '', legend: '', disabled: false },
{ name: 'B', query: 'up{env="$env"}', legend: '', disabled: false },
],
};
expect(extractQueryTextStrings(query)).toEqual(['up{env="$env"}']);
});
it('skips clickhouse entries with empty query strings', () => {
const query: Query = {
...baseQuery,
queryType: EQueryType.CLICKHOUSE,
clickhouse_sql: [
{ name: 'A', query: '', legend: '', disabled: false },
{
name: 'B',
query: 'SELECT * WHERE x = {{.env}}',
legend: '',
disabled: false,
},
],
};
expect(extractQueryTextStrings(query)).toEqual([
'SELECT * WHERE x = {{.env}}',
]);
});
it('returns empty array for unknown query type', () => {
const query = {
...baseQuery,
queryType: ('unknown' as unknown) as EQueryType,
};
expect(extractQueryTextStrings(query)).toEqual([]);
});
});
describe('getVariableReferencesInQuery', () => {
const variableNames = [
'deployment_environment',
'service_name',
'endpoint',
'unused_var',
];
it('returns empty array when query has no text', () => {
expect(getVariableReferencesInQuery(baseQuery, variableNames)).toEqual([]);
});
it('detects variables referenced in query builder filters', () => {
const query: Query = {
...baseQuery,
queryType: EQueryType.QUERY_BUILDER,
builder: {
queryData: [
({
filters: {
items: [
{ id: '1', op: '=', value: '$service_name' },
{ id: '2', op: 'IN', value: ['$deployment_environment'] },
],
op: 'AND',
},
} as unknown) as IBuilderQuery,
],
queryFormulas: [],
queryTraceOperator: [],
},
};
const result = getVariableReferencesInQuery(query, variableNames);
expect(result).toEqual(['deployment_environment', 'service_name']);
});
it('detects variables in promql queries', () => {
const query: Query = {
...baseQuery,
queryType: EQueryType.PROM,
promql: [
{
name: 'A',
query:
'http_requests{env="{{.deployment_environment}}", endpoint="$endpoint"}',
legend: '',
disabled: false,
},
],
};
const result = getVariableReferencesInQuery(query, variableNames);
expect(result).toEqual(['deployment_environment', 'endpoint']);
});
it('detects variables in clickhouse sql queries', () => {
const query: Query = {
...baseQuery,
queryType: EQueryType.CLICKHOUSE,
clickhouse_sql: [
{
name: 'A',
query: 'SELECT * FROM table WHERE service = [[service_name]]',
legend: '',
disabled: false,
},
],
};
const result = getVariableReferencesInQuery(query, variableNames);
expect(result).toEqual(['service_name']);
});
it('detects variables spread across multiple queryData entries', () => {
const query: Query = {
...baseQuery,
queryType: EQueryType.QUERY_BUILDER,
builder: {
queryData: [
({
filters: {
items: [{ id: '1', op: '=', value: '$service_name' }],
op: 'AND',
},
} as unknown) as IBuilderQuery,
({
filter: { expression: 'env = $deployment_environment' },
} as unknown) as IBuilderQuery,
],
queryFormulas: [],
queryTraceOperator: [],
},
};
const result = getVariableReferencesInQuery(query, variableNames);
expect(result).toEqual(['deployment_environment', 'service_name']);
});
it('returns empty array when no variables are referenced', () => {
const query: Query = {
...baseQuery,
queryType: EQueryType.PROM,
promql: [
{
name: 'A',
query: 'up{job="api"}',
legend: '',
disabled: false,
},
],
};
expect(getVariableReferencesInQuery(query, variableNames)).toEqual([]);
});
it('returns empty array when variableNames list is empty', () => {
const query: Query = {
...baseQuery,
queryType: EQueryType.PROM,
promql: [
{
name: 'A',
query: 'up{env="$deployment_environment"}',
legend: '',
disabled: false,
},
],
};
expect(getVariableReferencesInQuery(query, [])).toEqual([]);
});
});

View File

@@ -1,136 +0,0 @@
import { isArray } from 'lodash-es';
import { Query, TagFilterItem } from 'types/api/queryBuilder/queryBuilderData';
import { EQueryType } from 'types/common/dashboard';
/**
* Builds a RegExp that matches any recognized variable reference syntax:
* {{.variableName}} — dot prefix, optional whitespace
* {{variableName}} — no dot, optional whitespace
* $variableName — dollar prefix, word-boundary terminated
* [[variableName]] — square brackets, optional whitespace
*/
export function buildVariableReferencePattern(variableName: string): RegExp {
const patterns = [
`\\{\\{\\s*?\\.${variableName}\\s*?\\}\\}`,
`\\{\\{\\s*${variableName}\\s*\\}\\}`,
`\\$${variableName}\\b`,
`\\[\\[\\s*${variableName}\\s*\\]\\]`,
];
return new RegExp(patterns.join('|'));
}
/**
* Returns true if `text` contains a reference to `variableName` in any of the
* recognized variable syntaxes.
*/
export function textContainsVariableReference(
text: string,
variableName: string,
): boolean {
if (!text || !variableName) {
return false;
}
return buildVariableReferencePattern(variableName).test(text);
}
/**
* Extracts all text strings from a widget Query that could contain variable
* references. Covers:
* - QUERY_BUILDER: filter items' string values + filter.expression
* - PROM: each promql[].query
* - CLICKHOUSE: each clickhouse_sql[].query
*/
function extractQueryBuilderTexts(query: Query): string[] {
const texts: string[] = [];
const queryDataList = query.builder?.queryData;
if (isArray(queryDataList)) {
queryDataList.forEach((queryData) => {
// Collect string values from filter items
queryData.filters?.items?.forEach((filter: TagFilterItem) => {
if (isArray(filter.value)) {
filter.value.forEach((v) => {
if (typeof v === 'string') {
texts.push(v);
}
});
} else if (typeof filter.value === 'string') {
texts.push(filter.value);
}
});
// Collect filter expression
if (queryData.filter?.expression) {
texts.push(queryData.filter.expression);
}
});
}
return texts;
}
function extractPromQLTexts(query: Query): string[] {
const texts: string[] = [];
if (isArray(query.promql)) {
query.promql.forEach((promqlQuery) => {
if (promqlQuery.query) {
texts.push(promqlQuery.query);
}
});
}
return texts;
}
function extractClickhouseSQLTexts(query: Query): string[] {
const texts: string[] = [];
if (isArray(query.clickhouse_sql)) {
query.clickhouse_sql.forEach((clickhouseQuery) => {
if (clickhouseQuery.query) {
texts.push(clickhouseQuery.query);
}
});
}
return texts;
}
/**
* Extracts all text strings from a widget Query that could contain variable
* references. Covers:
* - QUERY_BUILDER: filter items' string values + filter.expression
* - PROM: each promql[].query
* - CLICKHOUSE: each clickhouse_sql[].query
*/
export function extractQueryTextStrings(query: Query): string[] {
if (query.queryType === EQueryType.QUERY_BUILDER) {
return extractQueryBuilderTexts(query);
}
if (query.queryType === EQueryType.PROM) {
return extractPromQLTexts(query);
}
if (query.queryType === EQueryType.CLICKHOUSE) {
return extractClickhouseSQLTexts(query);
}
return [];
}
/**
* Given a widget Query and an array of variable names, returns the subset of
* variable names that are referenced in the query text.
*
* This performs text-based detection only. Structural checks (like
* filter.key.key matching a variable attribute) are NOT included.
*/
export function getVariableReferencesInQuery(
query: Query,
variableNames: string[],
): string[] {
const texts = extractQueryTextStrings(query);
if (texts.length === 0) {
return [];
}
return variableNames.filter((name) =>
texts.some((text) => textContainsVariableReference(text, name)),
);
}

View File

@@ -128,15 +128,6 @@
opacity: 1;
}
.legend-item-label-trigger {
display: flex;
align-items: center;
gap: 6px;
flex: 1;
min-width: 0;
cursor: pointer;
}
.legend-marker {
border-width: 2px;
border-radius: 50%;
@@ -166,34 +157,10 @@
text-overflow: ellipsis;
white-space: nowrap;
min-width: 0;
user-select: none;
}
.legend-copy-button {
display: none;
align-items: center;
justify-content: center;
flex-shrink: 0;
padding: 2px;
margin: 0;
border: none;
color: var(--bg-vanilla-400);
cursor: pointer;
border-radius: 4px;
opacity: 1;
transition: opacity 0.15s ease, color 0.15s ease;
&:hover {
color: var(--bg-vanilla-100);
}
}
&:hover {
background: rgba(255, 255, 255, 0.05);
.legend-copy-button {
display: flex;
opacity: 1;
}
}
}
@@ -205,17 +172,4 @@
}
}
}
.legend-item {
&:hover {
background: rgba(0, 0, 0, 0.05);
}
.legend-copy-button {
color: var(--bg-ink-400);
&:hover {
color: var(--bg-ink-500);
}
}
}
}

View File

@@ -2,13 +2,11 @@ import { useCallback, useMemo, useRef, useState } from 'react';
import { VirtuosoGrid } from 'react-virtuoso';
import { Input, Tooltip as AntdTooltip } from 'antd';
import cx from 'classnames';
import { useCopyToClipboard } from 'hooks/useCopyToClipboard';
import { LegendItem } from 'lib/uPlotV2/config/types';
import useLegendsSync from 'lib/uPlotV2/hooks/useLegendsSync';
import { Check, Copy } from 'lucide-react';
import { useLegendActions } from '../../hooks/useLegendActions';
import { LegendPosition, LegendProps } from '../types';
import { useLegendActions } from './useLegendActions';
import './Legend.styles.scss';
@@ -34,7 +32,6 @@ export default function Legend({
});
const legendContainerRef = useRef<HTMLDivElement | null>(null);
const [legendSearchQuery, setLegendSearchQuery] = useState('');
const { copyToClipboard, id: copiedId } = useCopyToClipboard();
const legendItems = useMemo(() => Object.values(legendItemsMap), [
legendItemsMap,
@@ -62,53 +59,26 @@ export default function Legend({
);
}, [position, legendSearchQuery, legendItems]);
const handleCopyLegendItem = useCallback(
(e: React.MouseEvent, seriesIndex: number, label: string): void => {
e.stopPropagation();
copyToClipboard(label, seriesIndex);
},
[copyToClipboard],
);
const renderLegendItem = useCallback(
(item: LegendItem): JSX.Element => {
const isCopied = copiedId === item.seriesIndex;
return (
(item: LegendItem): JSX.Element => (
<AntdTooltip key={item.seriesIndex} title={item.label}>
<div
key={item.seriesIndex}
data-legend-item-id={item.seriesIndex}
className={cx('legend-item', `legend-item-${position.toLowerCase()}`, {
'legend-item-off': !item.show,
'legend-item-focused': focusedSeriesIndex === item.seriesIndex,
})}
>
<AntdTooltip title={item.label}>
<div className="legend-item-label-trigger">
<div
className="legend-marker"
style={{ borderColor: String(item.color) }}
data-is-legend-marker={true}
/>
<span className="legend-label">{item.label}</span>
</div>
</AntdTooltip>
<AntdTooltip title={isCopied ? 'Copied' : 'Copy'}>
<button
type="button"
className="legend-copy-button"
onClick={(e): void =>
handleCopyLegendItem(e, item.seriesIndex, item.label ?? '')
}
aria-label={`Copy ${item.label}`}
data-testid="legend-copy"
>
{isCopied ? <Check size={12} /> : <Copy size={12} />}
</button>
</AntdTooltip>
<div
className="legend-marker"
style={{ borderColor: String(item.color) }}
data-is-legend-marker={true}
/>
<span className="legend-label">{item.label}</span>
</div>
);
},
[copiedId, focusedSeriesIndex, handleCopyLegendItem, position],
</AntdTooltip>
),
[focusedSeriesIndex, position],
);
const isEmptyState = useMemo(() => {
@@ -136,7 +106,6 @@ export default function Legend({
placeholder="Search..."
value={legendSearchQuery}
onChange={(e): void => setLegendSearchQuery(e.target.value)}
data-testid="legend-search-input"
className="legend-search-input"
/>
</div>

View File

@@ -1,280 +0,0 @@
import React from 'react';
import {
fireEvent,
render,
RenderResult,
screen,
within,
} from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { LegendItem } from 'lib/uPlotV2/config/types';
import useLegendsSync from 'lib/uPlotV2/hooks/useLegendsSync';
import { useLegendActions } from '../../hooks/useLegendActions';
import Legend from '../Legend/Legend';
import { LegendPosition } from '../types';
const mockWriteText = jest.fn().mockResolvedValue(undefined);
let clipboardSpy: jest.SpyInstance | undefined;
jest.mock('react-virtuoso', () => ({
VirtuosoGrid: ({
data,
itemContent,
className,
}: {
data: LegendItem[];
itemContent: (index: number, item: LegendItem) => React.ReactNode;
className?: string;
}): JSX.Element => (
<div data-testid="virtuoso-grid" className={className}>
{data.map((item, index) => (
<div key={item.seriesIndex ?? index} data-testid="legend-item-wrapper">
{itemContent(index, item)}
</div>
))}
</div>
),
}));
jest.mock('lib/uPlotV2/hooks/useLegendsSync');
jest.mock('lib/uPlotV2/hooks/useLegendActions');
const mockUseLegendsSync = useLegendsSync as jest.MockedFunction<
typeof useLegendsSync
>;
const mockUseLegendActions = useLegendActions as jest.MockedFunction<
typeof useLegendActions
>;
describe('Legend', () => {
beforeAll(() => {
// JSDOM does not define navigator.clipboard; add it so we can spy on writeText
Object.defineProperty(navigator, 'clipboard', {
value: { writeText: () => Promise.resolve() },
writable: true,
configurable: true,
});
});
const baseLegendItemsMap = {
0: {
seriesIndex: 0,
label: 'A',
show: true,
color: '#ff0000',
},
1: {
seriesIndex: 1,
label: 'B',
show: false,
color: '#00ff00',
},
2: {
seriesIndex: 2,
label: 'C',
show: true,
color: '#0000ff',
},
};
let onLegendClick: jest.Mock;
let onLegendMouseMove: jest.Mock;
let onLegendMouseLeave: jest.Mock;
let onFocusSeries: jest.Mock;
beforeEach(() => {
onLegendClick = jest.fn();
onLegendMouseMove = jest.fn();
onLegendMouseLeave = jest.fn();
onFocusSeries = jest.fn();
mockWriteText.mockClear();
clipboardSpy = jest
.spyOn(navigator.clipboard, 'writeText')
.mockImplementation(mockWriteText);
mockUseLegendsSync.mockReturnValue({
legendItemsMap: baseLegendItemsMap,
focusedSeriesIndex: 1,
setFocusedSeriesIndex: jest.fn(),
});
mockUseLegendActions.mockReturnValue({
onLegendClick,
onLegendMouseMove,
onLegendMouseLeave,
onFocusSeries,
});
});
afterEach(() => {
clipboardSpy?.mockRestore();
jest.clearAllMocks();
});
const renderLegend = (position?: LegendPosition): RenderResult =>
render(
<Legend
position={position}
// config is not used directly in the component; it's consumed by the mocked hook
config={{} as any}
/>,
);
describe('layout and position', () => {
it('renders search input when legend position is RIGHT', () => {
renderLegend(LegendPosition.RIGHT);
expect(screen.getByTestId('legend-search-input')).toBeInTheDocument();
});
it('does not render search input when legend position is BOTTOM (default)', () => {
renderLegend();
expect(screen.queryByTestId('legend-search-input')).not.toBeInTheDocument();
});
it('renders the marker with the correct border color', () => {
renderLegend(LegendPosition.RIGHT);
const legendMarker = document.querySelector(
'[data-legend-item-id="0"] [data-is-legend-marker="true"]',
) as HTMLElement;
expect(legendMarker).toHaveStyle({
'border-color': '#ff0000',
});
});
it('renders all legend items in the grid by default', () => {
renderLegend(LegendPosition.RIGHT);
expect(screen.getByTestId('virtuoso-grid')).toBeInTheDocument();
expect(screen.getByText('A')).toBeInTheDocument();
expect(screen.getByText('B')).toBeInTheDocument();
expect(screen.getByText('C')).toBeInTheDocument();
});
});
describe('search behavior (RIGHT position)', () => {
it('filters legend items based on search query (case-insensitive)', async () => {
const user = userEvent.setup();
renderLegend(LegendPosition.RIGHT);
const searchInput = screen.getByTestId('legend-search-input');
await user.type(searchInput, 'A');
expect(screen.getByText('A')).toBeInTheDocument();
expect(screen.queryByText('B')).not.toBeInTheDocument();
expect(screen.queryByText('C')).not.toBeInTheDocument();
});
it('shows empty state when no legend items match the search query', async () => {
const user = userEvent.setup();
renderLegend(LegendPosition.RIGHT);
const searchInput = screen.getByTestId('legend-search-input');
await user.type(searchInput, 'network');
expect(
screen.getByText(/No series found matching "network"/i),
).toBeInTheDocument();
expect(screen.queryByTestId('virtuoso-grid')).not.toBeInTheDocument();
});
it('does not filter or show empty state when search query is empty or only whitespace', async () => {
const user = userEvent.setup();
renderLegend(LegendPosition.RIGHT);
const searchInput = screen.getByTestId('legend-search-input');
await user.type(searchInput, ' ');
expect(
screen.queryByText(/No series found matching/i),
).not.toBeInTheDocument();
expect(screen.getByText('A')).toBeInTheDocument();
expect(screen.getByText('B')).toBeInTheDocument();
expect(screen.getByText('C')).toBeInTheDocument();
});
});
describe('legend actions', () => {
it('calls onLegendClick when a legend item is clicked', async () => {
const user = userEvent.setup();
renderLegend(LegendPosition.RIGHT);
await user.click(screen.getByText('A'));
expect(onLegendClick).toHaveBeenCalledTimes(1);
});
it('calls onLegendMouseMove when the mouse moves over a legend item', async () => {
const user = userEvent.setup();
renderLegend(LegendPosition.RIGHT);
const legendItem = document.querySelector(
'[data-legend-item-id="0"]',
) as HTMLElement;
await user.hover(legendItem);
expect(onLegendMouseMove).toHaveBeenCalledTimes(1);
});
it('calls onLegendMouseLeave when the mouse leaves the legend container', async () => {
const user = userEvent.setup();
renderLegend(LegendPosition.RIGHT);
const container = document.querySelector('.legend-container') as HTMLElement;
await user.hover(container);
await user.unhover(container);
expect(onLegendMouseLeave).toHaveBeenCalledTimes(1);
});
});
describe('copy action', () => {
it('copies the legend label to clipboard when copy button is clicked', () => {
renderLegend(LegendPosition.RIGHT);
const firstLegendItem = document.querySelector(
'[data-legend-item-id="0"]',
) as HTMLElement;
const copyButton = within(firstLegendItem).getByTestId('legend-copy');
fireEvent.click(copyButton);
expect(mockWriteText).toHaveBeenCalledTimes(1);
expect(mockWriteText).toHaveBeenCalledWith('A');
});
it('copies the correct label when copy is clicked on a different legend item', () => {
renderLegend(LegendPosition.RIGHT);
const thirdLegendItem = document.querySelector(
'[data-legend-item-id="2"]',
) as HTMLElement;
const copyButton = within(thirdLegendItem).getByTestId('legend-copy');
fireEvent.click(copyButton);
expect(mockWriteText).toHaveBeenCalledTimes(1);
expect(mockWriteText).toHaveBeenCalledWith('C');
});
it('does not call onLegendClick when copy button is clicked', () => {
renderLegend(LegendPosition.RIGHT);
const firstLegendItem = document.querySelector(
'[data-legend-item-id="0"]',
) as HTMLElement;
const copyButton = within(firstLegendItem).getByTestId('legend-copy');
fireEvent.click(copyButton);
expect(onLegendClick).not.toHaveBeenCalled();
});
});
});

View File

@@ -4,12 +4,12 @@ import uPlot, { Axis } from 'uplot';
import { uPlotXAxisValuesFormat } from '../../uPlotLib/utils/constants';
import getGridColor from '../../uPlotLib/utils/getGridColor';
import { buildYAxisSizeCalculator } from '../utils/axis';
import { AxisProps, ConfigBuilder } from './types';
const PANEL_TYPES_WITH_X_AXIS_DATETIME_FORMAT = [
PANEL_TYPES.TIME_SERIES,
PANEL_TYPES.BAR,
PANEL_TYPES.PIE,
];
/**
@@ -114,6 +114,81 @@ export class UPlotAxisBuilder extends ConfigBuilder<AxisProps, Axis> {
: undefined;
}
/**
* Calculate axis size from existing size property
*/
private getExistingAxisSize(
self: uPlot,
axis: Axis,
values: string[] | undefined,
axisIdx: number,
cycleNum: number,
): number {
const internalSize = (axis as { _size?: number })._size;
if (internalSize !== undefined) {
return internalSize;
}
const existingSize = axis.size;
if (typeof existingSize === 'function') {
return existingSize(self, values ?? [], axisIdx, cycleNum);
}
return existingSize ?? 0;
}
/**
* Calculate text width for longest value
*/
private calculateTextWidth(
self: uPlot,
axis: Axis,
values: string[] | undefined,
): number {
if (!values || values.length === 0) {
return 0;
}
// Find longest value
const longestVal = values.reduce(
(acc, val) => (val.length > acc.length ? val : acc),
'',
);
if (longestVal === '' || !axis.font?.[0]) {
return 0;
}
// eslint-disable-next-line prefer-destructuring, no-param-reassign
self.ctx.font = axis.font[0];
return self.ctx.measureText(longestVal).width / devicePixelRatio;
}
/**
* Build Y-axis dynamic size calculator
*/
private buildYAxisSizeCalculator(): uPlot.Axis.Size {
return (
self: uPlot,
values: string[] | undefined,
axisIdx: number,
cycleNum: number,
): number => {
const axis = self.axes[axisIdx];
// Bail out on later cycles and reuse the existing size to force convergence
if (cycleNum > 1) {
return this.getExistingAxisSize(self, axis, values, axisIdx, cycleNum);
}
const gap = this.props.gap ?? 5;
let axisSize = (axis.ticks?.size ?? 0) + gap;
axisSize += this.calculateTextWidth(self, axis, values);
return Math.ceil(axisSize);
};
}
/**
* Build dynamic size calculator for Y-axis
*/
@@ -127,7 +202,7 @@ export class UPlotAxisBuilder extends ConfigBuilder<AxisProps, Axis> {
// Y-axis needs dynamic sizing based on text width
if (scaleKey === 'y') {
return buildYAxisSizeCalculator(this.props.gap ?? 5);
return this.buildYAxisSizeCalculator();
}
return undefined;
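As a worked example of the dynamic sizing above (numbers are illustrative, not from this diff): with axis.ticks.size = 10, gap = 5, and ctx.measureText reporting width 42 for the longest tick label at devicePixelRatio = 1, the first cycle returns Math.ceil(10 + 5 + 42 / 1) = 57; once cycleNum > 1 the calculator short-circuits via getExistingAxisSize and reuses the already-computed axis._size instead of re-measuring.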

View File

@@ -1,4 +1,3 @@
import { SeriesVisibilityState } from 'container/DashboardContainer/visualization/panels/types';
import { getStoredSeriesVisibility } from 'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils';
import { ThresholdsDrawHookOptions } from 'lib/uPlotV2/hooks/types';
import { thresholdsDrawHook } from 'lib/uPlotV2/hooks/useThresholdsDrawHook';
@@ -236,9 +235,9 @@ export class UPlotConfigBuilder extends ConfigBuilder<
}
/**
* Returns stored series visibility by index from localStorage when preferences source is LOCAL_STORAGE, otherwise null.
* Returns stored series visibility map from localStorage when preferences source is LOCAL_STORAGE, otherwise null.
*/
private getStoredVisibility(): SeriesVisibilityState | null {
private getStoredVisibilityMap(): Map<string, boolean> | null {
if (
this.widgetId &&
this.selectionPreferencesSource === SelectionPreferencesSource.LOCAL_STORAGE
@@ -252,23 +251,22 @@ export class UPlotConfigBuilder extends ConfigBuilder<
* Get legend items with visibility state restored from localStorage if available
*/
getLegendItems(): Record<number, LegendItem> {
const seriesVisibilityState = this.getStoredVisibility();
const isAnySeriesHidden = !!seriesVisibilityState?.visibility?.some(
(show) => !show,
const visibilityMap = this.getStoredVisibilityMap();
const isAnySeriesHidden = !!(
visibilityMap && Array.from(visibilityMap.values()).some((show) => !show)
);
return this.series.reduce((acc, s: UPlotSeriesBuilder, index: number) => {
const seriesConfig = s.getConfig();
const label = seriesConfig.label ?? '';
// +1 because uPlot series 0 is x-axis/time; data series are at 1, 2, ... (also matches stored visibility[0]=time, visibility[1]=first data, ...)
const seriesIndex = index + 1;
const show = resolveSeriesVisibility({
seriesIndex,
seriesShow: seriesConfig.show,
seriesLabel: label,
seriesVisibilityState,
const seriesIndex = index + 1; // +1 because the first series is the timestamp
const show = resolveSeriesVisibility(
label,
seriesConfig.show,
visibilityMap,
isAnySeriesHidden,
});
);
acc[seriesIndex] = {
seriesIndex,
@@ -296,23 +294,22 @@ export class UPlotConfigBuilder extends ConfigBuilder<
...DEFAULT_PLOT_CONFIG,
};
const seriesVisibilityState = this.getStoredVisibility();
const isAnySeriesHidden = !!seriesVisibilityState?.visibility?.some(
(show) => !show,
const visibilityMap = this.getStoredVisibilityMap();
const isAnySeriesHidden = !!(
visibilityMap && Array.from(visibilityMap.values()).some((show) => !show)
);
config.series = [
{ value: (): string => '' }, // Base series for timestamp
...this.series.map((s, index) => {
...this.series.map((s) => {
const series = s.getConfig();
// Stored visibility[0] is x-axis/time; data series start at visibility[1]
const visible = resolveSeriesVisibility({
seriesIndex: index + 1,
seriesShow: series.show,
seriesLabel: series.label ?? '',
seriesVisibilityState,
const label = series.label ?? '';
const visible = resolveSeriesVisibility(
label,
series.show,
visibilityMap,
isAnySeriesHidden,
});
);
return {
...series,
show: visible,
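The hunk above switches resolveSeriesVisibility from an options-object argument to positional arguments; the helper itself is not part of this diff. A plausible shape consistent with the new call sites, offered purely as an assumption:
// Hypothetical sketch inferred from the call sites above, not the actual helper.
function resolveSeriesVisibility(
label: string,
seriesShow: boolean | undefined,
visibilityMap: Map<string, boolean> | null,
isAnySeriesHidden: boolean,
): boolean {
// With no stored preferences, or when nothing is hidden, fall back to the series' own flag.
if (!visibilityMap || !isAnySeriesHidden) {
return seriesShow ?? true;
}
// Otherwise the stored label-keyed map drives visibility.
return visibilityMap.get(label) ?? true;
}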

View File

@@ -15,33 +15,7 @@ import {
* Builder for uPlot series configuration
* Handles creation of series settings
*/
/**
* Path builders are static and shared across all instances of UPlotSeriesBuilder
*/
let builders: PathBuilders | null = null;
export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
constructor(props: SeriesProps) {
super(props);
const pathBuilders = uPlot.paths;
if (!builders) {
const linearBuilder = pathBuilders.linear;
const splineBuilder = pathBuilders.spline;
const steppedBuilder = pathBuilders.stepped;
if (!linearBuilder || !splineBuilder || !steppedBuilder) {
throw new Error('Required uPlot path builders are not available');
}
builders = {
linear: linearBuilder(),
spline: splineBuilder(),
stepBefore: steppedBuilder({ align: -1 }),
stepAfter: steppedBuilder({ align: 1 }),
};
}
}
private buildLineConfig({
lineColor,
lineWidth,
@@ -224,6 +198,8 @@ interface PathBuilders {
[key: string]: Series.PathBuilder;
}
let builders: PathBuilders | null = null;
/**
* Get path builder based on draw style and interpolation
*/
@@ -231,8 +207,23 @@ function getPathBuilder(
style: DrawStyle,
lineInterpolation?: LineInterpolation,
): Series.PathBuilder {
const pathBuilders = uPlot.paths;
if (!builders) {
throw new Error('Required uPlot path builders are not available');
const linearBuilder = pathBuilders.linear;
const splineBuilder = pathBuilders.spline;
const steppedBuilder = pathBuilders.stepped;
if (!linearBuilder || !splineBuilder || !steppedBuilder) {
throw new Error('Required uPlot path builders are not available');
}
builders = {
linear: linearBuilder(),
spline: splineBuilder(),
stepBefore: steppedBuilder({ align: -1 }),
stepAfter: steppedBuilder({ align: 1 }),
};
}
if (style === DrawStyle.Line) {

View File

@@ -1,393 +0,0 @@
import { getToolTipValue } from 'components/Graph/yAxisConfig';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { uPlotXAxisValuesFormat } from 'lib/uPlotLib/utils/constants';
import type uPlot from 'uplot';
import type { AxisProps } from '../types';
import { UPlotAxisBuilder } from '../UPlotAxisBuilder';
jest.mock('components/Graph/yAxisConfig', () => ({
getToolTipValue: jest.fn(),
}));
const createAxisProps = (overrides: Partial<AxisProps> = {}): AxisProps => ({
scaleKey: 'x',
label: 'Time',
isDarkMode: false,
show: true,
...overrides,
});
describe('UPlotAxisBuilder', () => {
beforeEach(() => {
jest.clearAllMocks();
});
it('builds basic axis config with defaults', () => {
const builder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'x',
label: 'Time',
}),
);
const config = builder.getConfig();
expect(config.scale).toBe('x');
expect(config.label).toBe('Time');
expect(config.show).toBe(true);
expect(config.side).toBe(2);
expect(config.gap).toBe(5);
// Default grid and ticks are created
expect(config.grid).toEqual({
stroke: 'rgba(0,0,0,0.5)',
width: 0.2,
show: true,
});
expect(config.ticks).toEqual({
width: 0.3,
show: true,
});
});
it('sets config values when provided', () => {
const builder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'x',
label: 'Time',
show: false,
side: 0,
gap: 10,
grid: {
stroke: '#ff0000',
width: 1,
show: false,
},
ticks: {
stroke: '#00ff00',
width: 1,
show: false,
size: 10,
},
values: ['1', '2', '3'],
space: 20,
size: 100,
stroke: '#0000ff',
}),
);
const config = builder.getConfig();
expect(config.scale).toBe('x');
expect(config.label).toBe('Time');
expect(config.show).toBe(false);
expect(config.gap).toBe(10);
expect(config.grid).toEqual({
stroke: '#ff0000',
width: 1,
show: false,
});
expect(config.ticks).toEqual({
stroke: '#00ff00',
width: 1,
show: false,
size: 10,
});
expect(config.values).toEqual(['1', '2', '3']);
expect(config.space).toBe(20);
expect(config.size).toBe(100);
expect(config.stroke).toBe('#0000ff');
});
it('merges custom grid config over defaults and respects isDarkMode and isLogScale', () => {
const builder = new UPlotAxisBuilder(
createAxisProps({
isDarkMode: true,
isLogScale: true,
grid: {
width: 1,
},
}),
);
const config = builder.getConfig();
expect(config.grid).toEqual({
// stroke falls back to theme-based default when not provided
stroke: 'rgba(231,233,237,0.3)',
// provided width overrides default
width: 1,
// show falls back to default when not provided
show: true,
});
});
it('uses provided ticks config when present and falls back to defaults otherwise', () => {
const customTicks = { width: 1, show: false };
const withTicks = new UPlotAxisBuilder(
createAxisProps({
ticks: customTicks,
}),
);
const withoutTicks = new UPlotAxisBuilder(createAxisProps());
expect(withTicks.getConfig().ticks).toBe(customTicks);
expect(withoutTicks.getConfig().ticks).toEqual({
width: 0.3,
show: true,
});
});
it('uses time-based X-axis values formatter for time-series like panels', () => {
const builder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'x',
panelType: PANEL_TYPES.TIME_SERIES,
}),
);
const config = builder.getConfig();
expect(config.values).toBe(uPlotXAxisValuesFormat);
});
it('does not attach X-axis datetime formatter when panel type is not supported', () => {
const builder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'x',
panelType: PANEL_TYPES.LIST, // not in PANEL_TYPES_WITH_X_AXIS_DATETIME_FORMAT
}),
);
const config = builder.getConfig();
expect(config.values).toBeUndefined();
});
it('builds Y-axis values formatter that delegates to getToolTipValue', () => {
const yBuilder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'y',
yAxisUnit: 'ms',
decimalPrecision: 3,
}),
);
const config = yBuilder.getConfig();
expect(typeof config.values).toBe('function');
(getToolTipValue as jest.Mock).mockImplementation(
(value: string, unit?: string, precision?: unknown) =>
`formatted:${value}:${unit}:${precision}`,
);
// Simulate uPlot calling the values formatter
const valuesFn = (config.values as unknown) as (
self: uPlot,
vals: unknown[],
) => string[];
const result = valuesFn({} as uPlot, [1, null, 2, Number.NaN]);
expect(getToolTipValue).toHaveBeenCalledTimes(2);
expect(getToolTipValue).toHaveBeenNthCalledWith(1, '1', 'ms', 3);
expect(getToolTipValue).toHaveBeenNthCalledWith(2, '2', 'ms', 3);
// Null/NaN values should map to empty strings
expect(result).toEqual(['formatted:1:ms:3', '', 'formatted:2:ms:3', '']);
});
it('adds dynamic size calculator only for Y-axis when size is not provided', () => {
const yBuilder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'y',
}),
);
const xBuilder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'x',
}),
);
const yConfig = yBuilder.getConfig();
const xConfig = xBuilder.getConfig();
expect(typeof yConfig.size).toBe('function');
expect(xConfig.size).toBeUndefined();
});
it('uses explicit size function when provided', () => {
const sizeFn: uPlot.Axis.Size = jest.fn(() => 100) as uPlot.Axis.Size;
const builder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'y',
size: sizeFn,
}),
);
const config = builder.getConfig();
expect(config.size).toBe(sizeFn);
});
it('builds stroke color based on stroke and isDarkMode', () => {
const explicitStroke = new UPlotAxisBuilder(
createAxisProps({
stroke: '#ff0000',
}),
);
const darkStroke = new UPlotAxisBuilder(
createAxisProps({
stroke: undefined,
isDarkMode: true,
}),
);
const lightStroke = new UPlotAxisBuilder(
createAxisProps({
stroke: undefined,
isDarkMode: false,
}),
);
expect(explicitStroke.getConfig().stroke).toBe('#ff0000');
expect(darkStroke.getConfig().stroke).toBe('white');
expect(lightStroke.getConfig().stroke).toBe('black');
});
it('uses explicit values formatter when provided', () => {
const customValues: uPlot.Axis.Values = jest.fn(() => ['a', 'b', 'c']);
const builder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'y',
values: customValues,
}),
);
const config = builder.getConfig();
expect(config.values).toBe(customValues);
});
it('returns undefined values when scaleKey is neither x nor y', () => {
const builder = new UPlotAxisBuilder(createAxisProps({ scaleKey: 'custom' }));
const config = builder.getConfig();
expect(config.values).toBeUndefined();
});
it('includes space in config when provided', () => {
const builder = new UPlotAxisBuilder(
createAxisProps({ scaleKey: 'y', space: 50 }),
);
const config = builder.getConfig();
expect(config.space).toBe(50);
});
it('includes PANEL_TYPES.BAR and PANEL_TYPES.TIME_SERIES in X-axis datetime formatter', () => {
const barBuilder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'x',
panelType: PANEL_TYPES.BAR,
}),
);
expect(barBuilder.getConfig().values).toBe(uPlotXAxisValuesFormat);
const timeSeriesBuilder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'x',
panelType: PANEL_TYPES.TIME_SERIES,
}),
);
expect(timeSeriesBuilder.getConfig().values).toBe(uPlotXAxisValuesFormat);
});
it('should return the existing size when cycleNum > 1', () => {
const builder = new UPlotAxisBuilder(createAxisProps({ scaleKey: 'y' }));
const config = builder.getConfig();
const sizeFn = config.size;
expect(typeof sizeFn).toBe('function');
const mockAxis = {
_size: 80,
ticks: { size: 10 },
font: ['12px sans-serif'],
};
const mockSelf = ({
axes: [mockAxis],
ctx: { measureText: jest.fn(() => ({ width: 60 })), font: '' },
} as unknown) as uPlot;
const result = (sizeFn as (
s: uPlot,
v: string[],
a: number,
c: number,
) => number)(
mockSelf,
['100', '200'],
0,
2, // cycleNum > 1
);
expect(result).toBe(80);
});
it('should invoke the size calculator and compute from text width when cycleNum <= 1', () => {
const builder = new UPlotAxisBuilder(
createAxisProps({ scaleKey: 'y', gap: 8 }),
);
const config = builder.getConfig();
const sizeFn = config.size;
expect(typeof sizeFn).toBe('function');
const mockAxis = {
ticks: { size: 12 },
font: ['12px sans-serif'],
};
const measureText = jest.fn(() => ({ width: 48 }));
const mockSelf = ({
axes: [mockAxis],
ctx: {
measureText,
get font() {
return '';
},
set font(_v: string) {
/* noop */
},
},
} as unknown) as uPlot;
const result = (sizeFn as (
s: uPlot,
v: string[],
a: number,
c: number,
) => number)(
mockSelf,
['10', '2000ms'],
0,
0, // cycleNum <= 1
);
expect(measureText).toHaveBeenCalledWith('2000ms');
expect(result).toBeGreaterThanOrEqual(12 + 8);
});
it('merge updates axis props', () => {
const builder = new UPlotAxisBuilder(
createAxisProps({ scaleKey: 'y', label: 'Original' }),
);
builder.merge({ label: 'Merged', yAxisUnit: 'bytes' });
const config = builder.getConfig();
expect(config.label).toBe('Merged');
expect(config.values).toBeDefined();
});
});

View File

@@ -1,337 +0,0 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import uPlot from 'uplot';
import type { SeriesProps } from '../types';
import { DrawStyle, SelectionPreferencesSource } from '../types';
import { UPlotConfigBuilder } from '../UPlotConfigBuilder';
// Mock only the real boundary that hits localStorage
jest.mock(
'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils',
() => ({
getStoredSeriesVisibility: jest.fn(),
}),
);
const getStoredSeriesVisibilityMock = jest.requireMock(
'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils',
) as {
getStoredSeriesVisibility: jest.Mock;
};
describe('UPlotConfigBuilder', () => {
beforeEach(() => {
jest.clearAllMocks();
});
const createSeriesProps = (
overrides: Partial<SeriesProps> = {},
): SeriesProps => ({
scaleKey: 'y',
label: 'Requests',
colorMapping: {},
drawStyle: DrawStyle.Line,
panelType: PANEL_TYPES.TIME_SERIES,
...overrides,
});
it('returns correct save selection preference flag from constructor args', () => {
const builder = new UPlotConfigBuilder({
shouldSaveSelectionPreference: true,
});
expect(builder.getShouldSaveSelectionPreference()).toBe(true);
});
it('returns widgetId from constructor args', () => {
const builder = new UPlotConfigBuilder({ widgetId: 'widget-123' });
expect(builder.getWidgetId()).toBe('widget-123');
});
it('sets tzDate from constructor and includes it in config', () => {
const tzDate = (ts: number): Date => new Date(ts);
const builder = new UPlotConfigBuilder({ tzDate });
const config = builder.getConfig();
expect(config.tzDate).toBe(tzDate);
});
it('does not call onDragSelect for click without drag (width === 0)', () => {
const onDragSelect = jest.fn();
const builder = new UPlotConfigBuilder({ onDragSelect });
const config = builder.getConfig();
const setSelectHooks = config.hooks?.setSelect ?? [];
expect(setSelectHooks.length).toBe(1);
const uplotInstance = ({
select: { left: 10, width: 0 },
posToVal: jest.fn(),
} as unknown) as uPlot;
// Simulate uPlot calling the hook
const setSelectHook = setSelectHooks[0];
setSelectHook!(uplotInstance);
expect(onDragSelect).not.toHaveBeenCalled();
});
it('calls onDragSelect with start and end times in milliseconds for a drag selection', () => {
const onDragSelect = jest.fn();
const builder = new UPlotConfigBuilder({ onDragSelect });
const config = builder.getConfig();
const setSelectHooks = config.hooks?.setSelect ?? [];
expect(setSelectHooks.length).toBe(1);
const posToVal = jest
.fn()
// left position
.mockReturnValueOnce(100)
// left + width
.mockReturnValueOnce(110);
const uplotInstance = ({
select: { left: 50, width: 20 },
posToVal,
} as unknown) as uPlot;
const setSelectHook = setSelectHooks[0];
setSelectHook!(uplotInstance);
expect(onDragSelect).toHaveBeenCalledTimes(1);
// 100 and 110 seconds converted to milliseconds
expect(onDragSelect).toHaveBeenCalledWith(100_000, 110_000);
});
it('adds and removes hooks via addHook, and exposes them through getConfig', () => {
const builder = new UPlotConfigBuilder();
const drawHook = jest.fn();
const remove = builder.addHook('draw', drawHook as uPlot.Hooks.Defs['draw']);
let config = builder.getConfig();
expect(config.hooks?.draw).toContain(drawHook);
// Remove and ensure it no longer appears in config
remove();
config = builder.getConfig();
expect(config.hooks?.draw ?? []).not.toContain(drawHook);
});
it('adds axes, scales, and series and wires them into the final config', () => {
const builder = new UPlotConfigBuilder();
// Add axis and scale
builder.addAxis({ scaleKey: 'y', label: 'Requests' });
builder.addScale({ scaleKey: 'y' });
// Add two series; legend indices should start from 1 (0 is the timestamp series)
builder.addSeries(createSeriesProps({ label: 'Requests' }));
builder.addSeries(createSeriesProps({ label: 'Errors' }));
const config = builder.getConfig();
// Axes
expect(config.axes).toHaveLength(1);
expect(config.axes?.[0].scale).toBe('y');
// Scales are returned as an object keyed by scaleKey
expect(config.scales).toBeDefined();
expect(Object.keys(config.scales ?? {})).toContain('y');
// Series: base timestamp + 2 data series
expect(config.series).toHaveLength(3);
// Base series (index 0) has a value formatter that returns empty string
const baseSeries = config.series?.[0] as { value?: () => string };
expect(typeof baseSeries?.value).toBe('function');
expect(baseSeries?.value?.()).toBe('');
// Legend items align with series and carry label and color from series config
const legendItems = builder.getLegendItems();
expect(Object.keys(legendItems)).toEqual(['1', '2']);
expect(legendItems[1].seriesIndex).toBe(1);
expect(legendItems[1].label).toBe('Requests');
expect(legendItems[2].label).toBe('Errors');
});
it('merges axis when addAxis is called twice with same scaleKey', () => {
const builder = new UPlotConfigBuilder();
builder.addAxis({ scaleKey: 'y', label: 'Requests' });
builder.addAxis({ scaleKey: 'y', label: 'Updated Label', show: false });
const config = builder.getConfig();
expect(config.axes).toHaveLength(1);
expect(config.axes?.[0].label).toBe('Updated Label');
expect(config.axes?.[0].show).toBe(false);
});
it('merges scale when addScale is called twice with same scaleKey', () => {
const builder = new UPlotConfigBuilder();
builder.addScale({ scaleKey: 'y', min: 0 });
builder.addScale({ scaleKey: 'y', max: 100 });
const config = builder.getConfig();
// Only one scale entry for 'y' (merge path used, no duplicate added)
expect(config.scales).toBeDefined();
const scales = config.scales ?? {};
expect(Object.keys(scales)).toEqual(['y']);
expect(scales.y?.range).toBeDefined();
});
it('restores visibility state from localStorage when selectionPreferencesSource is LOCAL_STORAGE', () => {
// Index 0 = x-axis/time; indices 1,2 = data series (Requests, Errors). resolveSeriesVisibility matches by seriesIndex + seriesLabel.
getStoredSeriesVisibilityMock.getStoredSeriesVisibility.mockReturnValue({
labels: ['x-axis', 'Requests', 'Errors'],
visibility: [true, true, false],
});
const builder = new UPlotConfigBuilder({
widgetId: 'widget-1',
selectionPreferencesSource: SelectionPreferencesSource.LOCAL_STORAGE,
});
builder.addSeries(createSeriesProps({ label: 'Requests' }));
builder.addSeries(createSeriesProps({ label: 'Errors' }));
const legendItems = builder.getLegendItems();
// When any series is hidden, legend visibility is driven by the stored map
expect(legendItems[1].show).toBe(true);
expect(legendItems[2].show).toBe(false);
const config = builder.getConfig();
const [, firstSeries, secondSeries] = config.series ?? [];
expect(firstSeries?.show).toBe(true);
expect(secondSeries?.show).toBe(false);
});
it('does not attempt to read stored visibility when using in-memory preferences', () => {
const builder = new UPlotConfigBuilder({
widgetId: 'widget-1',
selectionPreferencesSource: SelectionPreferencesSource.IN_MEMORY,
});
builder.addSeries(createSeriesProps({ label: 'Requests' }));
builder.getLegendItems();
builder.getConfig();
expect(
getStoredSeriesVisibilityMock.getStoredSeriesVisibility,
).not.toHaveBeenCalled();
});
it('adds thresholds only once per scale key', () => {
const builder = new UPlotConfigBuilder();
const thresholdsOptions = {
scaleKey: 'y',
thresholds: [{ thresholdValue: 100 }],
};
builder.addThresholds(thresholdsOptions);
builder.addThresholds(thresholdsOptions);
const config = builder.getConfig();
const drawHooks = config.hooks?.draw ?? [];
// Only a single draw hook should be registered for the same scaleKey
expect(drawHooks.length).toBe(1);
});
it('adds multiple thresholds when scale key is different', () => {
const builder = new UPlotConfigBuilder();
const thresholdsOptions = {
scaleKey: 'y',
thresholds: [{ thresholdValue: 100 }],
};
builder.addThresholds(thresholdsOptions);
const thresholdsOptions2 = {
scaleKey: 'y2',
thresholds: [{ thresholdValue: 200 }],
};
builder.addThresholds(thresholdsOptions2);
const config = builder.getConfig();
const drawHooks = config.hooks?.draw ?? [];
// Two draw hooks should be registered for different scaleKeys
expect(drawHooks.length).toBe(2);
});
it('merges cursor configuration with defaults instead of replacing them', () => {
const builder = new UPlotConfigBuilder();
builder.setCursor({
drag: { setScale: false },
});
const config = builder.getConfig();
expect(config.cursor?.drag?.setScale).toBe(false);
// Points configuration from DEFAULT_CURSOR_CONFIG should still be present
expect(config.cursor?.points).toBeDefined();
});
it('adds plugins and includes them in config', () => {
const builder = new UPlotConfigBuilder();
const plugin: uPlot.Plugin = {
opts: (): void => {},
hooks: {},
};
builder.addPlugin(plugin);
const config = builder.getConfig();
expect(config.plugins).toContain(plugin);
});
it('sets padding, legend, focus, select, tzDate, bands and includes them in config', () => {
const tzDate = (ts: number): Date => new Date(ts);
const builder = new UPlotConfigBuilder();
const bands: uPlot.Band[] = [{ series: [1, 2], fill: (): string => '#000' }];
builder.setBands(bands);
builder.setPadding([10, 20, 30, 40]);
builder.setLegend({ show: true, live: true });
builder.setFocus({ alpha: 0.5 });
builder.setSelect({ left: 0, width: 0, top: 0, height: 0 });
builder.setTzDate(tzDate);
const config = builder.getConfig();
expect(config.bands).toEqual(bands);
expect(config.padding).toEqual([10, 20, 30, 40]);
expect(config.legend).toEqual({ show: true, live: true });
expect(config.focus).toEqual({ alpha: 0.5 });
expect(config.select).toEqual({ left: 0, width: 0, top: 0, height: 0 });
expect(config.tzDate).toBe(tzDate);
});
it('does not include plugins when none added', () => {
const builder = new UPlotConfigBuilder();
const config = builder.getConfig();
expect(config.plugins).toBeUndefined();
});
it('does not include bands when empty', () => {
const builder = new UPlotConfigBuilder();
const config = builder.getConfig();
expect(config.bands).toBeUndefined();
});
});

View File

@@ -1,236 +0,0 @@
import type uPlot from 'uplot';
import * as scaleUtils from '../../utils/scale';
import type { ScaleProps } from '../types';
import { DistributionType } from '../types';
import { UPlotScaleBuilder } from '../UPlotScaleBuilder';
const createScaleProps = (overrides: Partial<ScaleProps> = {}): ScaleProps => ({
scaleKey: 'y',
time: false,
auto: undefined,
min: undefined,
max: undefined,
softMin: undefined,
softMax: undefined,
distribution: DistributionType.Linear,
...overrides,
});
describe('UPlotScaleBuilder', () => {
const getFallbackMinMaxSpy = jest.spyOn(
scaleUtils,
'getFallbackMinMaxTimeStamp',
);
beforeEach(() => {
jest.clearAllMocks();
});
it('initializes softMin/softMax correctly when both are 0 (treated as unset)', () => {
const builder = new UPlotScaleBuilder(
createScaleProps({
softMin: 0,
softMax: 0,
}),
);
// Non-time scale so config path uses thresholds pipeline; we just care that
// adjustSoftLimitsWithThresholds receives null soft limits instead of 0/0.
const adjustSpy = jest.spyOn(scaleUtils, 'adjustSoftLimitsWithThresholds');
builder.getConfig();
expect(adjustSpy).toHaveBeenCalledWith(null, null, undefined, undefined);
});
it('handles time scales using explicit min/max and rounds max down to the previous minute', () => {
const min = 1_700_000_000; // seconds
const max = 1_700_000_600; // seconds
const builder = new UPlotScaleBuilder(
createScaleProps({
scaleKey: 'x',
time: true,
min,
max,
}),
);
const config = builder.getConfig();
const xScale = config.x;
expect(xScale.time).toBe(true);
expect(xScale.auto).toBe(false);
expect(Array.isArray(xScale.range)).toBe(true);
const [resolvedMin, resolvedMax] = xScale.range as [number, number];
// min is passed through
expect(resolvedMin).toBe(min);
// max is coerced to "endTime - 1 minute" and rounded down to minute precision
const oneMinuteAgoTimestamp = (max - 60) * 1000;
const currentDate = new Date(oneMinuteAgoTimestamp);
currentDate.setSeconds(0);
currentDate.setMilliseconds(0);
const expectedMax = Math.floor(currentDate.getTime() / 1000);
expect(resolvedMax).toBe(expectedMax);
});
it('falls back to getFallbackMinMaxTimeStamp when time scale has no min/max', () => {
getFallbackMinMaxSpy.mockReturnValue({
fallbackMin: 100,
fallbackMax: 200,
});
const builder = new UPlotScaleBuilder(
createScaleProps({
scaleKey: 'x',
time: true,
min: undefined,
max: undefined,
}),
);
const config = builder.getConfig();
const [resolvedMin, resolvedMax] = config.x.range as [number, number];
expect(getFallbackMinMaxSpy).toHaveBeenCalled();
expect(resolvedMin).toBe(100);
// max is aligned to the minute boundary of "fallbackMax - 60 seconds"
expect(resolvedMax).toBeLessThanOrEqual(200);
expect(resolvedMax).toBeGreaterThan(100);
});
it('pipes limits through soft-limit adjustment and log-scale normalization before range config', () => {
const adjustSpy = jest.spyOn(scaleUtils, 'adjustSoftLimitsWithThresholds');
const normalizeSpy = jest.spyOn(scaleUtils, 'normalizeLogScaleLimits');
const getRangeConfigSpy = jest.spyOn(scaleUtils, 'getRangeConfig');
const thresholds = {
scaleKey: 'y',
thresholds: [{ thresholdValue: 10 }],
yAxisUnit: 'ms',
};
const builder = new UPlotScaleBuilder(
createScaleProps({
softMin: 1,
softMax: 5,
min: 0,
max: 100,
distribution: DistributionType.Logarithmic,
thresholds,
logBase: 2,
padMinBy: 0.1,
padMaxBy: 0.2,
}),
);
builder.getConfig();
expect(adjustSpy).toHaveBeenCalledWith(1, 5, thresholds.thresholds, 'ms');
expect(normalizeSpy).toHaveBeenCalledWith({
distr: DistributionType.Logarithmic,
logBase: 2,
limits: {
min: 0,
max: 100,
softMin: expect.anything(),
softMax: expect.anything(),
},
});
expect(getRangeConfigSpy).toHaveBeenCalled();
});
it('computes distribution config for non-time scales and wires range function when range is not provided', () => {
const createRangeFnSpy = jest.spyOn(scaleUtils, 'createRangeFunction');
const builder = new UPlotScaleBuilder(
createScaleProps({
scaleKey: 'y',
time: false,
distribution: DistributionType.Linear,
}),
);
const config = builder.getConfig();
const yScale = config.y;
expect(createRangeFnSpy).toHaveBeenCalled();
// range should be a function when not provided explicitly
expect(typeof yScale.range).toBe('function');
// distribution config should be applied
expect(yScale.distr).toBeDefined();
expect(yScale.log).toBeDefined();
});
it('respects explicit range function when provided on props', () => {
const explicitRange: uPlot.Scale.Range = jest.fn(() => [
0,
10,
]) as uPlot.Scale.Range;
const builder = new UPlotScaleBuilder(
createScaleProps({
scaleKey: 'y',
range: explicitRange,
}),
);
const config = builder.getConfig();
const yScale = config.y;
expect(yScale.range).toBe(explicitRange);
});
it('derives auto flag when not explicitly provided, based on hasFixedRange and time', () => {
const getRangeConfigSpy = jest.spyOn(scaleUtils, 'getRangeConfig');
const builder = new UPlotScaleBuilder(
createScaleProps({
min: 0,
max: 100,
time: false,
}),
);
const config = builder.getConfig();
const yScale = config.y;
expect(getRangeConfigSpy).toHaveBeenCalled();
// For non-time scale with fixed min/max, hasFixedRange is true → auto should remain false
expect(yScale.auto).toBe(false);
});
it('merge updates internal min/max/soft limits while preserving other props', () => {
const builder = new UPlotScaleBuilder(
createScaleProps({
scaleKey: 'y',
min: 0,
max: 10,
softMin: 1,
softMax: 9,
time: false,
}),
);
builder.merge({
min: 2,
softMax: undefined,
});
expect(builder.props.min).toBe(2);
expect(builder.props.softMax).toBe(undefined);
expect(builder.props.max).toBe(10);
expect(builder.props.softMin).toBe(1);
expect(builder.props.time).toBe(false);
expect(builder.props.scaleKey).toBe('y');
expect(builder.props.distribution).toBe(DistributionType.Linear);
expect(builder.props.thresholds).toBe(undefined);
});
});

View File

@@ -1,295 +0,0 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { themeColors } from 'constants/theme';
import uPlot from 'uplot';
import type { SeriesProps } from '../types';
import {
DrawStyle,
LineInterpolation,
LineStyle,
VisibilityMode,
} from '../types';
import { UPlotSeriesBuilder } from '../UPlotSeriesBuilder';
const createBaseProps = (
overrides: Partial<SeriesProps> = {},
): SeriesProps => ({
scaleKey: 'y',
label: 'Requests',
colorMapping: {},
drawStyle: DrawStyle.Line,
isDarkMode: false,
panelType: PANEL_TYPES.TIME_SERIES,
...overrides,
});
interface MockPath extends uPlot.Series.Paths {
name?: string;
}
describe('UPlotSeriesBuilder', () => {
it('maps basic props into uPlot series config', () => {
const builder = new UPlotSeriesBuilder(
createBaseProps({
label: 'Latency',
spanGaps: true,
show: false,
}),
);
const config = builder.getConfig();
expect(config.scale).toBe('y');
expect(config.label).toBe('Latency');
expect(config.spanGaps).toBe(true);
expect(config.show).toBe(false);
expect(config.pxAlign).toBe(true);
expect(typeof config.value).toBe('function');
});
it('uses explicit lineColor when provided, regardless of mapping', () => {
const builder = new UPlotSeriesBuilder(
createBaseProps({
lineColor: '#ff00ff',
colorMapping: { Requests: '#00ff00' },
}),
);
const config = builder.getConfig();
expect(config.stroke).toBe('#ff00ff');
});
it('falls back to theme colors when no label is provided', () => {
const darkBuilder = new UPlotSeriesBuilder(
createBaseProps({
label: undefined,
isDarkMode: true,
lineColor: undefined,
}),
);
const lightBuilder = new UPlotSeriesBuilder(
createBaseProps({
label: undefined,
isDarkMode: false,
lineColor: undefined,
}),
);
const darkConfig = darkBuilder.getConfig();
const lightConfig = lightBuilder.getConfig();
expect(darkConfig.stroke).toBe(themeColors.white);
expect(lightConfig.stroke).toBe(themeColors.black);
});
it('uses colorMapping when available and no explicit lineColor is provided', () => {
const builder = new UPlotSeriesBuilder(
createBaseProps({
label: 'Requests',
colorMapping: { Requests: '#123456' },
lineColor: undefined,
}),
);
const config = builder.getConfig();
expect(config.stroke).toBe('#123456');
});
it('passes through a custom pathBuilder when provided', () => {
const customPaths = (jest.fn() as unknown) as uPlot.Series.PathBuilder;
const builder = new UPlotSeriesBuilder(
createBaseProps({
pathBuilder: customPaths,
}),
);
const config = builder.getConfig();
expect(config.paths).toBe(customPaths);
});
it('does not build line paths when drawStyle is Points, but still renders points', () => {
const builder = new UPlotSeriesBuilder(
createBaseProps({
drawStyle: DrawStyle.Points,
pointSize: 4,
lineWidth: 2,
lineColor: '#aa00aa',
}),
);
const config = builder.getConfig();
expect(typeof config.paths).toBe('function');
expect(config.paths && config.paths({} as uPlot, 1, 0, 10)).toBeNull();
expect(config.points).toBeDefined();
expect(config.points?.stroke).toBe('#aa00aa');
expect(config.points?.fill).toBe('#aa00aa');
expect(config.points?.show).toBe(true);
expect(config.points?.size).toBe(4);
});
it('derives point size based on lineWidth and pointSize', () => {
const smallPointsBuilder = new UPlotSeriesBuilder(
createBaseProps({
lineWidth: 4,
pointSize: 2,
}),
);
const largePointsBuilder = new UPlotSeriesBuilder(
createBaseProps({
lineWidth: 2,
pointSize: 4,
}),
);
const smallConfig = smallPointsBuilder.getConfig();
const largeConfig = largePointsBuilder.getConfig();
expect(smallConfig.points?.size).toBeUndefined();
expect(largeConfig.points?.size).toBe(4);
});
it('uses pointsBuilder when provided instead of default visibility logic', () => {
const pointsBuilder: uPlot.Series.Points.Show = jest.fn(
() => true,
) as uPlot.Series.Points.Show;
const builder = new UPlotSeriesBuilder(
createBaseProps({
pointsBuilder,
drawStyle: DrawStyle.Line,
}),
);
const config = builder.getConfig();
expect(config.points?.show).toBe(pointsBuilder);
});
it('respects VisibilityMode for point visibility when no custom pointsBuilder is given', () => {
const neverPointsBuilder = new UPlotSeriesBuilder(
createBaseProps({
drawStyle: DrawStyle.Line,
showPoints: VisibilityMode.Never,
}),
);
const alwaysPointsBuilder = new UPlotSeriesBuilder(
createBaseProps({
drawStyle: DrawStyle.Line,
showPoints: VisibilityMode.Always,
}),
);
const neverConfig = neverPointsBuilder.getConfig();
const alwaysConfig = alwaysPointsBuilder.getConfig();
expect(neverConfig.points?.show).toBe(false);
expect(alwaysConfig.points?.show).toBe(true);
});
it('applies LineStyle.Dashed and lineCap to line config', () => {
const builder = new UPlotSeriesBuilder(
createBaseProps({
lineStyle: LineStyle.Dashed,
lineCap: 'round' as CanvasLineCap,
}),
);
const config = builder.getConfig();
expect(config.dash).toEqual([10, 10]);
expect(config.cap).toBe('round');
});
it('builds default paths for Line drawStyle and invokes the path builder', () => {
const builder = new UPlotSeriesBuilder(
createBaseProps({
drawStyle: DrawStyle.Line,
lineInterpolation: LineInterpolation.Linear,
}),
);
const config = builder.getConfig();
const result = config.paths?.({} as uPlot, 1, 0, 10);
expect((result as MockPath).name).toBe('linear');
});
it('uses StepBefore and StepAfter interpolation for line paths', () => {
const stepBeforeBuilder = new UPlotSeriesBuilder(
createBaseProps({
drawStyle: DrawStyle.Line,
lineInterpolation: LineInterpolation.StepBefore,
}),
);
const stepAfterBuilder = new UPlotSeriesBuilder(
createBaseProps({
drawStyle: DrawStyle.Line,
lineInterpolation: LineInterpolation.StepAfter,
}),
);
const stepBeforeConfig = stepBeforeBuilder.getConfig();
const stepAfterConfig = stepAfterBuilder.getConfig();
const stepBeforePath = stepBeforeConfig.paths?.({} as uPlot, 1, 0, 5);
const stepAfterPath = stepAfterConfig.paths?.({} as uPlot, 1, 0, 5);
expect((stepBeforePath as MockPath).name).toBe('stepped-(-1)');
expect((stepAfterPath as MockPath).name).toBe('stepped-(1)');
});
it('defaults to spline interpolation when lineInterpolation is Spline or undefined', () => {
const splineBuilder = new UPlotSeriesBuilder(
createBaseProps({
drawStyle: DrawStyle.Line,
lineInterpolation: LineInterpolation.Spline,
}),
);
const defaultBuilder = new UPlotSeriesBuilder(
createBaseProps({ drawStyle: DrawStyle.Line }),
);
const splineConfig = splineBuilder.getConfig();
const defaultConfig = defaultBuilder.getConfig();
const splinePath = splineConfig.paths?.({} as uPlot, 1, 0, 10);
const defaultPath = defaultConfig.paths?.({} as uPlot, 1, 0, 10);
expect((splinePath as MockPath).name).toBe('spline');
expect((defaultPath as MockPath).name).toBe('spline');
});
it('uses generateColor when label has no colorMapping and no lineColor', () => {
const builder = new UPlotSeriesBuilder(
createBaseProps({
label: 'CustomSeries',
colorMapping: {},
lineColor: undefined,
}),
);
const config = builder.getConfig();
expect(config.stroke).toBe('#E64A3C');
});
it('passes through pointsFilter when provided', () => {
const pointsFilter: uPlot.Series.Points.Filter = jest.fn(
(_self, _seriesIdx, _show) => null,
);
const builder = new UPlotSeriesBuilder(
createBaseProps({
pointsFilter,
drawStyle: DrawStyle.Line,
}),
);
const config = builder.getConfig();
expect(config.points?.filter).toBe(pointsFilter);
});
});

View File

@@ -1,395 +0,0 @@
import { render, screen } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { updateSeriesVisibilityToLocalStorage } from 'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils';
import {
PlotContextProvider,
usePlotContext,
} from 'lib/uPlotV2/context/PlotContext';
import type uPlot from 'uplot';
jest.mock(
'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils',
() => ({
updateSeriesVisibilityToLocalStorage: jest.fn(),
}),
);
const mockUpdateSeriesVisibilityToLocalStorage = updateSeriesVisibilityToLocalStorage as jest.MockedFunction<
typeof updateSeriesVisibilityToLocalStorage
>;
interface MockSeries extends Partial<uPlot.Series> {
label?: string;
show?: boolean;
}
const createMockPlot = (series: MockSeries[] = []): uPlot =>
(({
series,
batch: jest.fn((fn: () => void) => fn()),
setSeries: jest.fn(),
} as unknown) as uPlot);
interface TestComponentProps {
plot?: uPlot;
widgetId?: string;
shouldSaveSelectionPreference?: boolean;
}
const TestComponent = ({
plot,
widgetId,
shouldSaveSelectionPreference,
}: TestComponentProps): JSX.Element => {
const {
setPlotContextInitialState,
syncSeriesVisibilityToLocalStorage,
onToggleSeriesVisibility,
onToggleSeriesOnOff,
onFocusSeries,
} = usePlotContext();
const handleInit = (): void => {
if (
!plot ||
!widgetId ||
typeof shouldSaveSelectionPreference !== 'boolean'
) {
return;
}
setPlotContextInitialState({
uPlotInstance: plot,
widgetId,
shouldSaveSelectionPreference,
});
};
return (
<div>
<button type="button" data-testid="init" onClick={handleInit}>
Init
</button>
<button
type="button"
data-testid="sync-visibility"
onClick={(): void => syncSeriesVisibilityToLocalStorage()}
>
Sync visibility
</button>
<button
type="button"
data-testid="toggle-visibility"
onClick={(): void => onToggleSeriesVisibility(1)}
>
Toggle visibility
</button>
<button
type="button"
data-testid="toggle-on-off-1"
onClick={(): void => onToggleSeriesOnOff(1)}
>
Toggle on/off 1
</button>
<button
type="button"
data-testid="toggle-on-off-5"
onClick={(): void => onToggleSeriesOnOff(5)}
>
Toggle on/off 5
</button>
<button
type="button"
data-testid="focus-series"
onClick={(): void => onFocusSeries(1)}
>
Focus series
</button>
</div>
);
};
describe('PlotContext', () => {
afterEach(() => {
jest.clearAllMocks();
});
it('throws when usePlotContext is used outside provider', () => {
const Consumer = (): JSX.Element => {
// eslint-disable-next-line react-hooks/rules-of-hooks
usePlotContext();
return <div />;
};
expect(() => render(<Consumer />)).toThrow(
'Should be used inside the context',
);
});
it('syncSeriesVisibilityToLocalStorage does nothing without plot or widgetId', async () => {
const user = userEvent.setup();
render(
<PlotContextProvider>
<TestComponent />
</PlotContextProvider>,
);
await user.click(screen.getByTestId('sync-visibility'));
expect(mockUpdateSeriesVisibilityToLocalStorage).not.toHaveBeenCalled();
});
it('syncSeriesVisibilityToLocalStorage serializes series visibility to localStorage helper', async () => {
const user = userEvent.setup();
const plot = createMockPlot([
{ label: 'x-axis', show: true },
{ label: 'CPU', show: true },
{ label: 'Memory', show: false },
]);
render(
<PlotContextProvider>
<TestComponent
plot={plot}
widgetId="widget-123"
shouldSaveSelectionPreference
/>
</PlotContextProvider>,
);
await user.click(screen.getByTestId('init'));
await user.click(screen.getByTestId('sync-visibility'));
expect(mockUpdateSeriesVisibilityToLocalStorage).toHaveBeenCalledTimes(1);
expect(mockUpdateSeriesVisibilityToLocalStorage).toHaveBeenCalledWith(
'widget-123',
[
{ label: 'x-axis', show: true },
{ label: 'CPU', show: true },
{ label: 'Memory', show: false },
],
);
});
describe('onToggleSeriesVisibility', () => {
it('does nothing when plot instance is not set', async () => {
const user = userEvent.setup();
render(
<PlotContextProvider>
<TestComponent />
</PlotContextProvider>,
);
await user.click(screen.getByTestId('toggle-visibility'));
// No errors and no calls to localStorage helper
expect(mockUpdateSeriesVisibilityToLocalStorage).not.toHaveBeenCalled();
});
it('highlights a single series and saves visibility when preferences are enabled', async () => {
const user = userEvent.setup();
const series: MockSeries[] = [
{ label: 'x-axis', show: true },
{ label: 'CPU', show: true },
{ label: 'Memory', show: true },
];
const plot = createMockPlot(series);
render(
<PlotContextProvider>
<TestComponent
plot={plot}
widgetId="widget-visibility"
shouldSaveSelectionPreference
/>
</PlotContextProvider>,
);
await user.click(screen.getByTestId('init'));
await user.click(screen.getByTestId('toggle-visibility'));
const setSeries = (plot.setSeries as jest.Mock).mock.calls;
// index 0 is skipped, so we expect calls for 1 and 2
expect(setSeries).toEqual([
[1, { show: true }],
[2, { show: false }],
]);
expect(mockUpdateSeriesVisibilityToLocalStorage).toHaveBeenCalledTimes(1);
expect(mockUpdateSeriesVisibilityToLocalStorage).toHaveBeenCalledWith(
'widget-visibility',
[
{ label: 'x-axis', show: true },
{ label: 'CPU', show: true },
{ label: 'Memory', show: true },
],
);
});
it('resets visibility for all series when toggling the same index again', async () => {
const user = userEvent.setup();
const series: MockSeries[] = [
{ label: 'x-axis', show: true },
{ label: 'CPU', show: true },
{ label: 'Memory', show: true },
];
const plot = createMockPlot(series);
render(
<PlotContextProvider>
<TestComponent
plot={plot}
widgetId="widget-reset"
shouldSaveSelectionPreference
/>
</PlotContextProvider>,
);
await user.click(screen.getByTestId('init'));
await user.click(screen.getByTestId('toggle-visibility'));
(plot.setSeries as jest.Mock).mockClear();
await user.click(screen.getByTestId('toggle-visibility'));
const setSeries = (plot.setSeries as jest.Mock).mock.calls;
// After reset, every series except index 0 (the x-axis) should be shown
expect(setSeries).toEqual([
[1, { show: true }],
[2, { show: true }],
]);
});
});
describe('onToggleSeriesOnOff', () => {
it('does nothing when plot instance is not set', async () => {
const user = userEvent.setup();
render(
<PlotContextProvider>
<TestComponent />
</PlotContextProvider>,
);
await user.click(screen.getByTestId('toggle-on-off-1'));
expect(mockUpdateSeriesVisibilityToLocalStorage).not.toHaveBeenCalled();
});
it('toggles series show flag and saves visibility when preferences are enabled', async () => {
const user = userEvent.setup();
const series: MockSeries[] = [
{ label: 'x-axis', show: true },
{ label: 'CPU', show: true },
];
const plot = createMockPlot(series);
render(
<PlotContextProvider>
<TestComponent
plot={plot}
widgetId="widget-toggle"
shouldSaveSelectionPreference
/>
</PlotContextProvider>,
);
await user.click(screen.getByTestId('init'));
await user.click(screen.getByTestId('toggle-on-off-1'));
expect(plot.setSeries).toHaveBeenCalledWith(1, { show: false });
expect(mockUpdateSeriesVisibilityToLocalStorage).toHaveBeenCalledTimes(1);
expect(mockUpdateSeriesVisibilityToLocalStorage).toHaveBeenCalledWith(
'widget-toggle',
expect.any(Array),
);
});
it('does not toggle when target series does not exist', async () => {
const user = userEvent.setup();
const series: MockSeries[] = [{ label: 'x-axis', show: true }];
const plot = createMockPlot(series);
render(
<PlotContextProvider>
<TestComponent
plot={plot}
widgetId="widget-missing-series"
shouldSaveSelectionPreference
/>
</PlotContextProvider>,
);
await user.click(screen.getByTestId('init'));
await user.click(screen.getByTestId('toggle-on-off-5'));
expect(plot.setSeries).not.toHaveBeenCalled();
expect(mockUpdateSeriesVisibilityToLocalStorage).not.toHaveBeenCalled();
});
it('does not persist visibility when preferences flag is disabled', async () => {
const user = userEvent.setup();
const series: MockSeries[] = [
{ label: 'x-axis', show: true },
{ label: 'CPU', show: true },
];
const plot = createMockPlot(series);
render(
<PlotContextProvider>
<TestComponent
plot={plot}
widgetId="widget-no-persist"
shouldSaveSelectionPreference={false}
/>
</PlotContextProvider>,
);
await user.click(screen.getByTestId('init'));
await user.click(screen.getByTestId('toggle-on-off-1'));
expect(plot.setSeries).toHaveBeenCalledWith(1, { show: false });
expect(mockUpdateSeriesVisibilityToLocalStorage).not.toHaveBeenCalled();
});
});
describe('onFocusSeries', () => {
it('does nothing when plot instance is not set', async () => {
const user = userEvent.setup();
render(
<PlotContextProvider>
<TestComponent />
</PlotContextProvider>,
);
await user.click(screen.getByTestId('focus-series'));
});
it('sets focus on the given series index', async () => {
const user = userEvent.setup();
const plot = createMockPlot([
{ label: 'x-axis', show: true },
{ label: 'CPU', show: true },
]);
render(
<PlotContextProvider>
<TestComponent
plot={plot}
widgetId="widget-focus"
shouldSaveSelectionPreference={false}
/>
</PlotContextProvider>,
);
await user.click(screen.getByTestId('init'));
await user.click(screen.getByTestId('focus-series'));
expect(plot.setSeries).toHaveBeenCalledWith(1, { focus: true }, false);
});
});
});

View File

@@ -1,201 +0,0 @@
import { renderHook } from '@testing-library/react';
import { usePlotContext } from 'lib/uPlotV2/context/PlotContext';
import { useLegendActions } from 'lib/uPlotV2/hooks/useLegendActions';
jest.mock('lib/uPlotV2/context/PlotContext');
const mockUsePlotContext = usePlotContext as jest.MockedFunction<
typeof usePlotContext
>;
describe('useLegendActions', () => {
let onToggleSeriesVisibility: jest.Mock;
let onToggleSeriesOnOff: jest.Mock;
let onFocusSeriesPlot: jest.Mock;
let setPlotContextInitialState: jest.Mock;
let syncSeriesVisibilityToLocalStorage: jest.Mock;
let setFocusedSeriesIndexMock: jest.Mock;
let cancelAnimationFrameSpy: jest.SpyInstance<void, [handle: number]>;
beforeAll(() => {
jest
.spyOn(global, 'requestAnimationFrame')
.mockImplementation((cb: FrameRequestCallback): number => {
cb(0);
return 1;
});
cancelAnimationFrameSpy = jest
.spyOn(global, 'cancelAnimationFrame')
.mockImplementation(() => {});
});
afterAll(() => {
jest.restoreAllMocks();
});
beforeEach(() => {
onToggleSeriesVisibility = jest.fn();
onToggleSeriesOnOff = jest.fn();
onFocusSeriesPlot = jest.fn();
setPlotContextInitialState = jest.fn();
syncSeriesVisibilityToLocalStorage = jest.fn();
setFocusedSeriesIndexMock = jest.fn();
mockUsePlotContext.mockReturnValue({
onToggleSeriesVisibility,
onToggleSeriesOnOff,
onFocusSeries: onFocusSeriesPlot,
setPlotContextInitialState,
syncSeriesVisibilityToLocalStorage,
});
cancelAnimationFrameSpy.mockClear();
});
const createMouseEvent = (options: {
legendItemId?: number;
isMarker?: boolean;
}): any => {
const { legendItemId, isMarker = false } = options;
return {
target: {
dataset: {
...(isMarker ? { isLegendMarker: 'true' } : {}),
},
closest: jest.fn(() =>
legendItemId !== undefined
? { dataset: { legendItemId: String(legendItemId) } }
: null,
),
},
};
};
describe('onLegendClick', () => {
it('toggles series visibility when clicking on legend label', async () => {
const { result } = renderHook(() =>
useLegendActions({
setFocusedSeriesIndex: setFocusedSeriesIndexMock,
focusedSeriesIndex: null,
}),
);
result.current.onLegendClick(createMouseEvent({ legendItemId: 0 }));
expect(onToggleSeriesVisibility).toHaveBeenCalledTimes(1);
expect(onToggleSeriesVisibility).toHaveBeenCalledWith(0);
expect(onToggleSeriesOnOff).not.toHaveBeenCalled();
});
it('toggles series on/off when clicking on marker', async () => {
const { result } = renderHook(() =>
useLegendActions({
setFocusedSeriesIndex: setFocusedSeriesIndexMock,
focusedSeriesIndex: null,
}),
);
result.current.onLegendClick(
createMouseEvent({ legendItemId: 0, isMarker: true }),
);
expect(onToggleSeriesOnOff).toHaveBeenCalledTimes(1);
expect(onToggleSeriesOnOff).toHaveBeenCalledWith(0);
expect(onToggleSeriesVisibility).not.toHaveBeenCalled();
});
it('does nothing when click target is not inside a legend item', async () => {
const { result } = renderHook(() =>
useLegendActions({
setFocusedSeriesIndex: setFocusedSeriesIndexMock,
focusedSeriesIndex: null,
}),
);
result.current.onLegendClick(createMouseEvent({}));
expect(onToggleSeriesOnOff).not.toHaveBeenCalled();
expect(onToggleSeriesVisibility).not.toHaveBeenCalled();
});
});
describe('onFocusSeries', () => {
it('schedules focus update and calls plot focus handler via mouse move', async () => {
const { result } = renderHook(() =>
useLegendActions({
setFocusedSeriesIndex: setFocusedSeriesIndexMock,
focusedSeriesIndex: null,
}),
);
result.current.onLegendMouseMove(createMouseEvent({ legendItemId: 0 }));
expect(setFocusedSeriesIndexMock).toHaveBeenCalledWith(0);
expect(onFocusSeriesPlot).toHaveBeenCalledWith(0);
});
it('cancels previous animation frame before scheduling new one on subsequent mouse moves', async () => {
const { result } = renderHook(() =>
useLegendActions({
setFocusedSeriesIndex: setFocusedSeriesIndexMock,
focusedSeriesIndex: null,
}),
);
result.current.onLegendMouseMove(createMouseEvent({ legendItemId: 0 }));
result.current.onLegendMouseMove(createMouseEvent({ legendItemId: 1 }));
expect(cancelAnimationFrameSpy).toHaveBeenCalled();
});
});
describe('onLegendMouseMove', () => {
it('focuses new series when hovering over different legend item', async () => {
const { result } = renderHook(() =>
useLegendActions({
setFocusedSeriesIndex: setFocusedSeriesIndexMock,
focusedSeriesIndex: 0,
}),
);
result.current.onLegendMouseMove(createMouseEvent({ legendItemId: 1 }));
expect(setFocusedSeriesIndexMock).toHaveBeenCalledWith(1);
expect(onFocusSeriesPlot).toHaveBeenCalledWith(1);
});
it('does nothing when hovering over already focused series', async () => {
const { result } = renderHook(() =>
useLegendActions({
setFocusedSeriesIndex: setFocusedSeriesIndexMock,
focusedSeriesIndex: 1,
}),
);
result.current.onLegendMouseMove(createMouseEvent({ legendItemId: 1 }));
expect(setFocusedSeriesIndexMock).not.toHaveBeenCalled();
expect(onFocusSeriesPlot).not.toHaveBeenCalled();
});
});
describe('onLegendMouseLeave', () => {
it('cancels pending animation frame and clears focus state', async () => {
const { result } = renderHook(() =>
useLegendActions({
setFocusedSeriesIndex: setFocusedSeriesIndexMock,
focusedSeriesIndex: null,
}),
);
result.current.onLegendMouseMove(createMouseEvent({ legendItemId: 0 }));
result.current.onLegendMouseLeave();
expect(cancelAnimationFrameSpy).toHaveBeenCalled();
expect(setFocusedSeriesIndexMock).toHaveBeenCalledWith(null);
expect(onFocusSeriesPlot).toHaveBeenCalledWith(null);
});
});
});

View File

@@ -1,192 +0,0 @@
import { act, cleanup, renderHook } from '@testing-library/react';
import type { LegendItem } from 'lib/uPlotV2/config/types';
import type { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import useLegendsSync from 'lib/uPlotV2/hooks/useLegendsSync';
describe('useLegendsSync', () => {
let requestAnimationFrameSpy: jest.SpyInstance<
number,
[callback: FrameRequestCallback]
>;
let cancelAnimationFrameSpy: jest.SpyInstance<void, [handle: number]>;
beforeAll(() => {
requestAnimationFrameSpy = jest
.spyOn(global, 'requestAnimationFrame')
.mockImplementation((cb: FrameRequestCallback): number => {
cb(0);
return 1;
});
cancelAnimationFrameSpy = jest
.spyOn(global, 'cancelAnimationFrame')
.mockImplementation(() => {});
});
afterEach(() => {
jest.clearAllMocks();
cleanup();
});
afterAll(() => {
jest.restoreAllMocks();
});
const createMockConfig = (
legendItems: Record<number, LegendItem>,
): {
config: UPlotConfigBuilder;
invokeSetSeries: (
seriesIndex: number | null,
opts: { show?: boolean; focus?: boolean },
fireHook?: boolean,
) => void;
} => {
let setSeriesHandler:
| ((u: uPlot, seriesIndex: number | null, opts: uPlot.Series) => void)
| null = null;
const config = ({
getLegendItems: jest.fn(() => legendItems),
addHook: jest.fn(
(
hookName: string,
handler: (
u: uPlot,
seriesIndex: number | null,
opts: uPlot.Series,
) => void,
) => {
if (hookName === 'setSeries') {
setSeriesHandler = handler;
}
return (): void => {
setSeriesHandler = null;
};
},
),
} as unknown) as UPlotConfigBuilder;
const invokeSetSeries = (
seriesIndex: number | null,
opts: { show?: boolean; focus?: boolean },
): void => {
if (setSeriesHandler) {
setSeriesHandler({} as uPlot, seriesIndex, { ...opts });
}
};
return { config, invokeSetSeries };
};
it('initializes legend items from config', () => {
const initialItems: Record<number, LegendItem> = {
1: { seriesIndex: 1, label: 'CPU', show: true, color: '#f00' },
2: { seriesIndex: 2, label: 'Memory', show: false, color: '#0f0' },
};
const { config } = createMockConfig(initialItems);
const { result } = renderHook(() => useLegendsSync({ config }));
expect(config.getLegendItems).toHaveBeenCalledTimes(1);
expect(config.addHook).toHaveBeenCalledWith(
'setSeries',
expect.any(Function),
);
expect(result.current.legendItemsMap).toEqual(initialItems);
});
it('updates focusedSeriesIndex when a series gains focus via setSeries by default', async () => {
const initialItems: Record<number, LegendItem> = {
1: { seriesIndex: 1, label: 'CPU', show: true, color: '#f00' },
};
const { config, invokeSetSeries } = createMockConfig(initialItems);
const { result } = renderHook(() => useLegendsSync({ config }));
expect(result.current.focusedSeriesIndex).toBeNull();
await act(async () => {
invokeSetSeries(1, { focus: true });
});
expect(result.current.focusedSeriesIndex).toBe(1);
});
it('does not update focusedSeriesIndex when subscribeToFocusChange is false', () => {
const initialItems: Record<number, LegendItem> = {
1: { seriesIndex: 1, label: 'CPU', show: true, color: '#f00' },
};
const { config, invokeSetSeries } = createMockConfig(initialItems);
const { result } = renderHook(() =>
useLegendsSync({ config, subscribeToFocusChange: false }),
);
invokeSetSeries(1, { focus: true });
expect(result.current.focusedSeriesIndex).toBeNull();
});
it('updates legendItemsMap visibility when show changes for a series', async () => {
const initialItems: Record<number, LegendItem> = {
0: { seriesIndex: 0, label: 'x-axis', show: true, color: '#000' },
1: { seriesIndex: 1, label: 'CPU', show: true, color: '#f00' },
};
const { config, invokeSetSeries } = createMockConfig(initialItems);
const { result } = renderHook(() => useLegendsSync({ config }));
// Toggle visibility of series 1
await act(async () => {
invokeSetSeries(1, { show: false });
});
expect(result.current.legendItemsMap[1].show).toBe(false);
});
it('ignores visibility updates for unknown legend items or unchanged show values', () => {
const initialItems: Record<number, LegendItem> = {
1: { seriesIndex: 1, label: 'CPU', show: true, color: '#f00' },
};
const { config, invokeSetSeries } = createMockConfig(initialItems);
const { result } = renderHook(() => useLegendsSync({ config }));
const before = result.current.legendItemsMap;
// Unknown series index
invokeSetSeries(5, { show: false });
// Unchanged visibility for existing item
invokeSetSeries(1, { show: true });
const after = result.current.legendItemsMap;
expect(after).toEqual(before);
});
it('cancels pending visibility RAF on unmount', () => {
const initialItems: Record<number, LegendItem> = {
1: { seriesIndex: 1, label: 'CPU', show: true, color: '#f00' },
};
const { config, invokeSetSeries } = createMockConfig(initialItems);
// Override RAF to not immediately invoke callback so we can assert cancellation
requestAnimationFrameSpy.mockImplementationOnce(() => 42);
const { unmount } = renderHook(() => useLegendsSync({ config }));
invokeSetSeries(1, { show: false });
unmount();
expect(cancelAnimationFrameSpy).toHaveBeenCalledWith(42);
});
});

View File

@@ -1,218 +0,0 @@
import type uPlot from 'uplot';
import { Axis } from 'uplot';
import {
buildYAxisSizeCalculator,
calculateTextWidth,
getExistingAxisSize,
} from '../axis';
describe('axis utils', () => {
describe('calculateTextWidth', () => {
it('returns 0 when values are undefined or empty', () => {
const mockSelf = ({
ctx: {
measureText: jest.fn(),
font: '',
},
} as unknown) as uPlot;
// internally the type is string but it is an array of strings
const mockAxis: Axis = { font: (['12px sans-serif'] as unknown) as string };
expect(calculateTextWidth(mockSelf, mockAxis, undefined)).toBe(0);
expect(calculateTextWidth(mockSelf, mockAxis, [])).toBe(0);
});
it('returns 0 when longest value is empty string or axis has no usable font', () => {
const mockSelf = ({
ctx: {
measureText: jest.fn(),
font: '',
},
} as unknown) as uPlot;
const axisWithoutFont: Axis = { font: '' };
const axisWithEmptyFontArray: Axis = { font: '' };
expect(calculateTextWidth(mockSelf, axisWithoutFont, [''])).toBe(0);
expect(
calculateTextWidth(mockSelf, axisWithEmptyFontArray, ['a', 'bb']),
).toBe(0);
});
it('measures longest value using canvas context and axis font', () => {
const measureText = jest.fn(() => ({ width: 100 }));
const mockSelf = ({
ctx: {
font: '',
measureText,
},
} as unknown) as uPlot;
const mockAxis: Axis = { font: (['14px Arial'] as unknown) as string };
const values = ['1', '1234', '12'];
const dpr =
((global as unknown) as { devicePixelRatio?: number }).devicePixelRatio ??
1;
const result = calculateTextWidth(mockSelf, mockAxis, values);
expect(measureText).toHaveBeenCalledWith('1234');
expect(mockSelf.ctx.font).toBe('14px Arial');
expect(result).toBe(100 / dpr);
});
});
describe('getExistingAxisSize', () => {
it('returns internal _size when present', () => {
const axis: any = {
_size: 42,
size: 10,
};
const result = getExistingAxisSize({
uplotInstance: ({} as unknown) as uPlot,
axis,
axisIdx: 0,
cycleNum: 0,
});
expect(result).toBe(42);
});
it('invokes size function when _size is not set', () => {
const sizeFn = jest.fn(() => 24);
const axis: Axis = { size: sizeFn };
const instance = ({} as unknown) as uPlot;
const result = getExistingAxisSize({
uplotInstance: instance,
axis,
values: ['10', '20'],
axisIdx: 1,
cycleNum: 2,
});
expect(sizeFn).toHaveBeenCalledWith(instance, ['10', '20'], 1, 2);
expect(result).toBe(24);
});
it('returns numeric size or 0 when neither _size nor size are provided', () => {
const axisWithSize: Axis = { size: 16 };
const axisWithoutSize: Axis = {};
const instance = ({} as unknown) as uPlot;
expect(
getExistingAxisSize({
uplotInstance: instance,
axis: axisWithSize,
axisIdx: 0,
cycleNum: 0,
}),
).toBe(16);
expect(
getExistingAxisSize({
uplotInstance: instance,
axis: axisWithoutSize,
axisIdx: 0,
cycleNum: 0,
}),
).toBe(0);
});
});
describe('buildYAxisSizeCalculator', () => {
it('delegates to getExistingAxisSize when cycleNum > 1', () => {
const sizeCalculator = buildYAxisSizeCalculator(5);
const axis: any = {
_size: 80,
ticks: { size: 10 },
font: ['12px sans-serif'],
};
const measureText = jest.fn(() => ({ width: 60 }));
const self = ({
axes: [axis],
ctx: {
font: '',
measureText,
},
} as unknown) as uPlot;
if (typeof sizeCalculator === 'number') {
throw new Error('Size calculator is a number');
}
const result = sizeCalculator(self, ['10', '20'], 0, 2);
expect(result).toBe(80);
expect(measureText).not.toHaveBeenCalled();
});
it('computes size from ticks, gap and text width when cycleNum <= 1', () => {
const gap = 7;
const sizeCalculator = buildYAxisSizeCalculator(gap);
const axis: Axis = {
ticks: { size: 12 },
font: (['12px sans-serif'] as unknown) as string,
};
const measureText = jest.fn(() => ({ width: 50 }));
const self = ({
axes: [axis],
ctx: {
font: '',
measureText,
},
} as unknown) as uPlot;
const dpr =
((global as unknown) as { devicePixelRatio?: number }).devicePixelRatio ??
1;
const expected = Math.ceil(12 + gap + 50 / dpr);
if (typeof sizeCalculator === 'number') {
throw new Error('Size calculator is a number');
}
const result = sizeCalculator(self, ['short', 'the-longest'], 0, 0);
expect(measureText).toHaveBeenCalledWith('the-longest');
expect(result).toBe(expected);
});
it('uses 0 ticks size when ticks are not defined', () => {
const gap = 4;
const sizeCalculator = buildYAxisSizeCalculator(gap);
const axis: Axis = {
font: (['12px sans-serif'] as unknown) as string,
};
const measureText = jest.fn(() => ({ width: 40 }));
const self = ({
axes: [axis],
ctx: {
font: '',
measureText,
},
} as unknown) as uPlot;
const dpr =
((global as unknown) as { devicePixelRatio?: number }).devicePixelRatio ??
1;
const expected = Math.ceil(gap + 40 / dpr);
if (typeof sizeCalculator === 'number') {
throw new Error('Size calculator is a number');
}
const result = sizeCalculator(self, ['1', '123'], 0, 1);
expect(result).toBe(expected);
});
});
});

View File

@@ -1,80 +0,0 @@
import { Axis } from 'uplot';
/**
* Calculate text width for longest value
*/
export function calculateTextWidth(
self: uPlot,
axis: Axis,
values: string[] | undefined,
): number {
if (!values || values.length === 0) {
return 0;
}
// Find longest value
const longestVal = values.reduce(
(acc, val) => (val.length > acc.length ? val : acc),
'',
);
if (longestVal === '' || !axis.font?.[0]) {
return 0;
}
self.ctx.font = axis.font[0];
return self.ctx.measureText(longestVal).width / devicePixelRatio;
}
export function getExistingAxisSize({
uplotInstance,
axis,
values,
axisIdx,
cycleNum,
}: {
uplotInstance: uPlot;
axis: Axis;
values?: string[];
axisIdx: number;
cycleNum: number;
}): number {
const internalSize = (axis as { _size?: number })._size;
if (internalSize !== undefined) {
return internalSize;
}
const existingSize = axis.size;
if (typeof existingSize === 'function') {
return existingSize(uplotInstance, values ?? [], axisIdx, cycleNum);
}
return existingSize ?? 0;
}
export function buildYAxisSizeCalculator(gap: number): uPlot.Axis.Size {
return (
self: uPlot,
values: string[] | undefined,
axisIdx: number,
cycleNum: number,
): number => {
const axis = self.axes[axisIdx];
// Bail out, force convergence
if (cycleNum > 1) {
return getExistingAxisSize({
uplotInstance: self,
axis,
values,
axisIdx,
cycleNum,
});
}
let axisSize = (axis.ticks?.size ?? 0) + gap;
axisSize += calculateTextWidth(self, axis, values);
return Math.ceil(axisSize);
};
}

View File

@@ -1,25 +1,11 @@
import { SeriesVisibilityState } from 'container/DashboardContainer/visualization/panels/types';
export function resolveSeriesVisibility({
seriesIndex,
seriesShow,
seriesLabel,
seriesVisibilityState,
isAnySeriesHidden,
}: {
seriesIndex: number;
seriesShow: boolean | undefined | null;
seriesLabel: string;
seriesVisibilityState: SeriesVisibilityState | null;
isAnySeriesHidden: boolean;
}): boolean {
if (
isAnySeriesHidden &&
seriesVisibilityState?.visibility &&
seriesVisibilityState.labels.length > seriesIndex &&
seriesVisibilityState.labels[seriesIndex] === seriesLabel
) {
return seriesVisibilityState.visibility[seriesIndex] ?? false;
export function resolveSeriesVisibility(
label: string,
seriesShow: boolean | undefined | null,
visibilityMap: Map<string, boolean> | null,
isAnySeriesHidden: boolean,
): boolean {
if (isAnySeriesHidden) {
return visibilityMap?.get(label) ?? false;
}
return seriesShow ?? true;
}

View File

@@ -0,0 +1,44 @@
package middleware
import (
"log/slog"
"net/http"
"runtime/debug"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
)
// Recovery is a middleware that recovers from panics, logs the panic,
// and returns a 500 Internal Server Error.
type Recovery struct {
logger *slog.Logger
}
// NewRecovery creates a new Recovery middleware.
func NewRecovery(logger *slog.Logger) Wrapper {
return &Recovery{
logger: logger.With("pkg", "http-middleware-recovery"),
}
}
// Wrap is the middleware handler.
func (m *Recovery) Wrap(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
defer func() {
if err := recover(); err != nil {
m.logger.ErrorContext(
r.Context(),
"panic recovered",
"err", err, "stack", string(debug.Stack()),
)
render.Error(w, errors.NewInternalf(
errors.CodeInternal, "internal server error",
))
}
}()
next.ServeHTTP(w, r)
})
}
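A minimal usage sketch for the Recovery wrapper above, assuming the package lives at github.com/SigNoz/signoz/pkg/http/middleware and that the Wrapper interface exposes the Wrap method shown; the logger handler, route, and port are placeholders:
package main
import (
"log/slog"
"net/http"
"os"
"github.com/SigNoz/signoz/pkg/http/middleware"
)
func main() {
logger := slog.New(slog.NewTextHandler(os.Stdout, nil))
recovery := middleware.NewRecovery(logger)
mux := http.NewServeMux()
mux.HandleFunc("/healthz", func(w http.ResponseWriter, _ *http.Request) {
w.WriteHeader(http.StatusOK)
})
// A panic inside any handler is logged with its stack and rendered as a
// 500 Internal Server Error instead of crashing the process.
_ = http.ListenAndServe(":8080", recovery.Wrap(mux))
}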

View File

@@ -13,6 +13,7 @@ import (
root "github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/gorilla/mux"
)
@@ -462,7 +463,7 @@ func (h *handler) UpdateAPIKey(w http.ResponseWriter, r *http.Request) {
return
}
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email.String())) {
if slices.Contains(integrationstypes.IntegrationUserEmails, createdByUser.Email) {
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "API Keys for integration users cannot be revoked"))
return
}
@@ -507,7 +508,7 @@ func (h *handler) RevokeAPIKey(w http.ResponseWriter, r *http.Request) {
return
}
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email.String())) {
if slices.Contains(integrationstypes.IntegrationUserEmails, createdByUser.Email) {
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "API Keys for integration users cannot be revoked"))
return
}

View File

@@ -19,6 +19,7 @@ import (
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/emailtypes"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/dustin/go-humanize"
@@ -171,7 +172,7 @@ func (m *Module) DeleteInvite(ctx context.Context, orgID string, id valuer.UUID)
func (module *Module) CreateUser(ctx context.Context, input *types.User, opts ...root.CreateUserOption) error {
createUserOpts := root.NewCreateUserOptions(opts...)
// since assign is idempotant multiple calls to assign won't cause issues in case of retries.
// since assign is idempotent multiple calls to assign won't cause issues in case of retries.
err := module.authz.Grant(ctx, input.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role), authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
if err != nil {
return err
@@ -286,7 +287,7 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
return err
}
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(user.Email.String())) {
if slices.Contains(integrationstypes.IntegrationUserEmails, user.Email) {
return errors.New(errors.TypeForbidden, errors.CodeForbidden, "integration user cannot be deleted")
}
@@ -300,7 +301,7 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
return errors.New(errors.TypeForbidden, errors.CodeForbidden, "cannot delete the last admin")
}
// since revoke is idempotant multiple calls to revoke won't cause issues in case of retries
// since revoke is idempotent multiple calls to revoke won't cause issues in case of retries
err = module.authz.Revoke(ctx, orgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role), authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
if err != nil {
return err

View File

@@ -1,624 +0,0 @@
package cloudintegrations
import (
"context"
"fmt"
"net/url"
"slices"
"strings"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"golang.org/x/exp/maps"
)
var SupportedCloudProviders = []string{
"aws",
}
func validateCloudProviderName(name string) *model.ApiError {
if !slices.Contains(SupportedCloudProviders, name) {
return model.BadRequest(fmt.Errorf("invalid cloud provider: %s", name))
}
return nil
}
type Controller struct {
accountsRepo cloudProviderAccountsRepository
serviceConfigRepo ServiceConfigDatabase
}
func NewController(sqlStore sqlstore.SQLStore) (*Controller, error) {
accountsRepo, err := newCloudProviderAccountsRepository(sqlStore)
if err != nil {
return nil, fmt.Errorf("couldn't create cloud provider accounts repo: %w", err)
}
serviceConfigRepo, err := newServiceConfigRepository(sqlStore)
if err != nil {
return nil, fmt.Errorf("couldn't create cloud provider service config repo: %w", err)
}
return &Controller{
accountsRepo: accountsRepo,
serviceConfigRepo: serviceConfigRepo,
}, nil
}
type ConnectedAccountsListResponse struct {
Accounts []types.Account `json:"accounts"`
}
func (c *Controller) ListConnectedAccounts(ctx context.Context, orgId string, cloudProvider string) (
*ConnectedAccountsListResponse, *model.ApiError,
) {
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
return nil, apiErr
}
accountRecords, apiErr := c.accountsRepo.listConnected(ctx, orgId, cloudProvider)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't list cloud accounts")
}
connectedAccounts := []types.Account{}
for _, a := range accountRecords {
connectedAccounts = append(connectedAccounts, a.Account())
}
return &ConnectedAccountsListResponse{
Accounts: connectedAccounts,
}, nil
}
type GenerateConnectionUrlRequest struct {
// Optional. To be specified for updates.
AccountId *string `json:"account_id,omitempty"`
AccountConfig types.AccountConfig `json:"account_config"`
AgentConfig SigNozAgentConfig `json:"agent_config"`
}
type SigNozAgentConfig struct {
// The region in which SigNoz agent should be installed.
Region string `json:"region"`
IngestionUrl string `json:"ingestion_url"`
IngestionKey string `json:"ingestion_key"`
SigNozAPIUrl string `json:"signoz_api_url"`
SigNozAPIKey string `json:"signoz_api_key"`
Version string `json:"version,omitempty"`
}
type GenerateConnectionUrlResponse struct {
AccountId string `json:"account_id"`
ConnectionUrl string `json:"connection_url"`
}
func (c *Controller) GenerateConnectionUrl(ctx context.Context, orgId string, cloudProvider string, req GenerateConnectionUrlRequest) (*GenerateConnectionUrlResponse, *model.ApiError) {
// Account connection with a simple connection URL may not be available for all providers.
if cloudProvider != "aws" {
return nil, model.BadRequest(fmt.Errorf("unsupported cloud provider: %s", cloudProvider))
}
account, apiErr := c.accountsRepo.upsert(
ctx, orgId, cloudProvider, req.AccountId, &req.AccountConfig, nil, nil, nil,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't upsert cloud account")
}
agentVersion := "v0.0.8"
if req.AgentConfig.Version != "" {
agentVersion = req.AgentConfig.Version
}
connectionUrl := fmt.Sprintf(
"https://%s.console.aws.amazon.com/cloudformation/home?region=%s#/stacks/quickcreate?",
req.AgentConfig.Region, req.AgentConfig.Region,
)
for qp, value := range map[string]string{
"param_SigNozIntegrationAgentVersion": agentVersion,
"param_SigNozApiUrl": req.AgentConfig.SigNozAPIUrl,
"param_SigNozApiKey": req.AgentConfig.SigNozAPIKey,
"param_SigNozAccountId": account.ID.StringValue(),
"param_IngestionUrl": req.AgentConfig.IngestionUrl,
"param_IngestionKey": req.AgentConfig.IngestionKey,
"stackName": "signoz-integration",
"templateURL": fmt.Sprintf(
"https://signoz-integrations.s3.us-east-1.amazonaws.com/aws-quickcreate-template-%s.json",
agentVersion,
),
} {
connectionUrl += fmt.Sprintf("&%s=%s", qp, url.QueryEscape(value))
}
return &GenerateConnectionUrlResponse{
AccountId: account.ID.StringValue(),
ConnectionUrl: connectionUrl,
}, nil
}
type AccountStatusResponse struct {
Id string `json:"id"`
CloudAccountId *string `json:"cloud_account_id,omitempty"`
Status types.AccountStatus `json:"status"`
}
func (c *Controller) GetAccountStatus(ctx context.Context, orgId string, cloudProvider string, accountId string) (
*AccountStatusResponse, *model.ApiError,
) {
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
return nil, apiErr
}
account, apiErr := c.accountsRepo.get(ctx, orgId, cloudProvider, accountId)
if apiErr != nil {
return nil, apiErr
}
resp := AccountStatusResponse{
Id: account.ID.StringValue(),
CloudAccountId: account.AccountID,
Status: account.Status(),
}
return &resp, nil
}
type AgentCheckInRequest struct {
ID string `json:"account_id"`
AccountID string `json:"cloud_account_id"`
// Arbitrary cloud specific Agent data
Data map[string]any `json:"data,omitempty"`
}
type AgentCheckInResponse struct {
AccountId string `json:"account_id"`
CloudAccountId string `json:"cloud_account_id"`
RemovedAt *time.Time `json:"removed_at"`
IntegrationConfig IntegrationConfigForAgent `json:"integration_config"`
}
type IntegrationConfigForAgent struct {
EnabledRegions []string `json:"enabled_regions"`
TelemetryCollectionStrategy *CompiledCollectionStrategy `json:"telemetry,omitempty"`
}
func (c *Controller) CheckInAsAgent(ctx context.Context, orgId string, cloudProvider string, req AgentCheckInRequest) (*AgentCheckInResponse, error) {
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
return nil, apiErr
}
existingAccount, apiErr := c.accountsRepo.get(ctx, orgId, cloudProvider, req.ID)
if existingAccount != nil && existingAccount.AccountID != nil && *existingAccount.AccountID != req.AccountID {
return nil, model.BadRequest(fmt.Errorf(
"can't check in with new %s account id %s for account %s with existing %s id %s",
cloudProvider, req.AccountID, existingAccount.ID.StringValue(), cloudProvider, *existingAccount.AccountID,
))
}
existingAccount, apiErr = c.accountsRepo.getConnectedCloudAccount(ctx, orgId, cloudProvider, req.AccountID)
if existingAccount != nil && existingAccount.ID.StringValue() != req.ID {
return nil, model.BadRequest(fmt.Errorf(
"can't check in to %s account %s with id %s. already connected with id %s",
cloudProvider, req.AccountID, req.ID, existingAccount.ID.StringValue(),
))
}
agentReport := types.AgentReport{
TimestampMillis: time.Now().UnixMilli(),
Data: req.Data,
}
account, apiErr := c.accountsRepo.upsert(
ctx, orgId, cloudProvider, &req.ID, nil, &req.AccountID, &agentReport, nil,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't upsert cloud account")
}
// prepare and return integration config to be consumed by agent
compiledStrategy, err := NewCompiledCollectionStrategy(cloudProvider)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't init telemetry collection strategy: %w", err,
))
}
agentConfig := IntegrationConfigForAgent{
EnabledRegions: []string{},
TelemetryCollectionStrategy: compiledStrategy,
}
if account.Config != nil && account.Config.EnabledRegions != nil {
agentConfig.EnabledRegions = account.Config.EnabledRegions
}
services, err := services.Map(cloudProvider)
if err != nil {
return nil, err
}
svcConfigs, apiErr := c.serviceConfigRepo.getAllForAccount(
ctx, orgId, account.ID.StringValue(),
)
if apiErr != nil {
return nil, model.WrapApiError(
apiErr, "couldn't get service configs for cloud account",
)
}
// accumulate config in a fixed order to ensure same config generated across runs
configuredServices := maps.Keys(svcConfigs)
slices.Sort(configuredServices)
for _, svcType := range configuredServices {
definition, ok := services[svcType]
if !ok {
continue
}
config := svcConfigs[svcType]
err := AddServiceStrategy(svcType, compiledStrategy, definition.Strategy, config)
if err != nil {
return nil, err
}
}
return &AgentCheckInResponse{
AccountId: account.ID.StringValue(),
CloudAccountId: *account.AccountID,
RemovedAt: account.RemovedAt,
IntegrationConfig: agentConfig,
}, nil
}
type UpdateAccountConfigRequest struct {
Config types.AccountConfig `json:"config"`
}
func (c *Controller) UpdateAccountConfig(ctx context.Context, orgId string, cloudProvider string, accountId string, req UpdateAccountConfigRequest) (*types.Account, *model.ApiError) {
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
return nil, apiErr
}
accountRecord, apiErr := c.accountsRepo.upsert(
ctx, orgId, cloudProvider, &accountId, &req.Config, nil, nil, nil,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't upsert cloud account")
}
account := accountRecord.Account()
return &account, nil
}
func (c *Controller) DisconnectAccount(ctx context.Context, orgId string, cloudProvider string, accountId string) (*types.CloudIntegration, *model.ApiError) {
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
return nil, apiErr
}
account, apiErr := c.accountsRepo.get(ctx, orgId, cloudProvider, accountId)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't disconnect account")
}
tsNow := time.Now()
account, apiErr = c.accountsRepo.upsert(
ctx, orgId, cloudProvider, &accountId, nil, nil, nil, &tsNow,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't disconnect account")
}
return account, nil
}
type ListServicesResponse struct {
Services []ServiceSummary `json:"services"`
}
func (c *Controller) ListServices(
ctx context.Context,
orgID string,
cloudProvider string,
cloudAccountId *string,
) (*ListServicesResponse, *model.ApiError) {
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
return nil, apiErr
}
definitions, apiErr := services.List(cloudProvider)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't list cloud services")
}
svcConfigs := map[string]*types.CloudServiceConfig{}
if cloudAccountId != nil {
activeAccount, apiErr := c.accountsRepo.getConnectedCloudAccount(
ctx, orgID, cloudProvider, *cloudAccountId,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't get active account")
}
svcConfigs, apiErr = c.serviceConfigRepo.getAllForAccount(
ctx, orgID, activeAccount.ID.StringValue(),
)
if apiErr != nil {
return nil, model.WrapApiError(
apiErr, "couldn't get service configs for cloud account",
)
}
}
summaries := []ServiceSummary{}
for _, def := range definitions {
summary := ServiceSummary{
Metadata: def.Metadata,
}
summary.Config = svcConfigs[summary.Id]
summaries = append(summaries, summary)
}
return &ListServicesResponse{
Services: summaries,
}, nil
}
func (c *Controller) GetServiceDetails(
ctx context.Context,
orgID string,
cloudProvider string,
serviceId string,
cloudAccountId *string,
) (*ServiceDetails, error) {
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
return nil, apiErr
}
definition, err := services.GetServiceDefinition(cloudProvider, serviceId)
if err != nil {
return nil, err
}
details := ServiceDetails{
Definition: *definition,
}
if cloudAccountId != nil {
activeAccount, apiErr := c.accountsRepo.getConnectedCloudAccount(
ctx, orgID, cloudProvider, *cloudAccountId,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't get active account")
}
config, apiErr := c.serviceConfigRepo.get(
ctx, orgID, activeAccount.ID.StringValue(), serviceId,
)
if apiErr != nil && apiErr.Type() != model.ErrorNotFound {
return nil, model.WrapApiError(apiErr, "couldn't fetch service config")
}
if config != nil {
details.Config = config
enabled := false
if config.Metrics != nil && config.Metrics.Enabled {
enabled = true
}
// add links to service dashboards, making them clickable.
for i, d := range definition.Assets.Dashboards {
dashboardUuid := c.dashboardUuid(
cloudProvider, serviceId, d.Id,
)
if enabled {
definition.Assets.Dashboards[i].Url = fmt.Sprintf("/dashboard/%s", dashboardUuid)
} else {
definition.Assets.Dashboards[i].Url = "" // to unset the in-memory URL if enabled once and disabled afterwards
}
}
}
}
return &details, nil
}
type UpdateServiceConfigRequest struct {
CloudAccountId string `json:"cloud_account_id"`
Config types.CloudServiceConfig `json:"config"`
}
func (u *UpdateServiceConfigRequest) Validate(def *services.Definition) error {
if def.Id != services.S3Sync && u.Config.Logs != nil && u.Config.Logs.S3Buckets != nil {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "s3 buckets can only be added to service-type[%s]", services.S3Sync)
} else if def.Id == services.S3Sync && u.Config.Logs != nil && u.Config.Logs.S3Buckets != nil {
for region := range u.Config.Logs.S3Buckets {
if _, found := ValidAWSRegions[region]; !found {
return errors.NewInvalidInputf(CodeInvalidCloudRegion, "invalid cloud region: %s", region)
}
}
}
return nil
}
type UpdateServiceConfigResponse struct {
Id string `json:"id"`
Config types.CloudServiceConfig `json:"config"`
}
func (c *Controller) UpdateServiceConfig(
ctx context.Context,
orgID string,
cloudProvider string,
serviceType string,
req *UpdateServiceConfigRequest,
) (*UpdateServiceConfigResponse, error) {
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
return nil, apiErr
}
// can only update config for a valid service.
definition, err := services.GetServiceDefinition(cloudProvider, serviceType)
if err != nil {
return nil, err
}
if err := req.Validate(definition); err != nil {
return nil, err
}
// can only update config for a connected cloud account id
_, apiErr := c.accountsRepo.getConnectedCloudAccount(
ctx, orgID, cloudProvider, req.CloudAccountId,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't find connected cloud account")
}
updatedConfig, apiErr := c.serviceConfigRepo.upsert(
ctx, orgID, cloudProvider, req.CloudAccountId, serviceType, req.Config,
)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't update service config")
}
return &UpdateServiceConfigResponse{
Id: serviceType,
Config: *updatedConfig,
}, nil
}
// All dashboards that are available based on cloud integrations configuration
// across all cloud providers
func (c *Controller) AvailableDashboards(ctx context.Context, orgId valuer.UUID) ([]*dashboardtypes.Dashboard, *model.ApiError) {
allDashboards := []*dashboardtypes.Dashboard{}
for _, provider := range []string{"aws"} {
providerDashboards, apiErr := c.AvailableDashboardsForCloudProvider(ctx, orgId, provider)
if apiErr != nil {
return nil, model.WrapApiError(
apiErr, fmt.Sprintf("couldn't get available dashboards for %s", provider),
)
}
allDashboards = append(allDashboards, providerDashboards...)
}
return allDashboards, nil
}
func (c *Controller) AvailableDashboardsForCloudProvider(ctx context.Context, orgID valuer.UUID, cloudProvider string) ([]*dashboardtypes.Dashboard, *model.ApiError) {
accountRecords, apiErr := c.accountsRepo.listConnected(ctx, orgID.StringValue(), cloudProvider)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't list connected cloud accounts")
}
// for v0, service dashboards are only available when metrics are enabled.
servicesWithAvailableMetrics := map[string]*time.Time{}
for _, ar := range accountRecords {
if ar.AccountID != nil {
configsBySvcId, apiErr := c.serviceConfigRepo.getAllForAccount(
ctx, orgID.StringValue(), ar.ID.StringValue(),
)
if apiErr != nil {
return nil, apiErr
}
for svcId, config := range configsBySvcId {
if config.Metrics != nil && config.Metrics.Enabled {
servicesWithAvailableMetrics[svcId] = &ar.CreatedAt
}
}
}
}
allServices, apiErr := services.List(cloudProvider)
if apiErr != nil {
return nil, apiErr
}
svcDashboards := []*dashboardtypes.Dashboard{}
for _, svc := range allServices {
serviceDashboardsCreatedAt := servicesWithAvailableMetrics[svc.Id]
if serviceDashboardsCreatedAt != nil {
for _, d := range svc.Assets.Dashboards {
author := fmt.Sprintf("%s-integration", cloudProvider)
svcDashboards = append(svcDashboards, &dashboardtypes.Dashboard{
ID: c.dashboardUuid(cloudProvider, svc.Id, d.Id),
Locked: true,
Data: *d.Definition,
TimeAuditable: types.TimeAuditable{
CreatedAt: *serviceDashboardsCreatedAt,
UpdatedAt: *serviceDashboardsCreatedAt,
},
UserAuditable: types.UserAuditable{
CreatedBy: author,
UpdatedBy: author,
},
OrgID: orgID,
})
}
servicesWithAvailableMetrics[svc.Id] = nil
}
}
return svcDashboards, nil
}
func (c *Controller) GetDashboardById(ctx context.Context, orgId valuer.UUID, dashboardUuid string) (*dashboardtypes.Dashboard, *model.ApiError) {
cloudProvider, _, _, apiErr := c.parseDashboardUuid(dashboardUuid)
if apiErr != nil {
return nil, apiErr
}
allDashboards, apiErr := c.AvailableDashboardsForCloudProvider(ctx, orgId, cloudProvider)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "couldn't list available dashboards")
}
for _, d := range allDashboards {
if d.ID == dashboardUuid {
return d, nil
}
}
return nil, model.NotFoundError(fmt.Errorf("couldn't find dashboard with uuid: %s", dashboardUuid))
}
func (c *Controller) dashboardUuid(
cloudProvider string, svcId string, dashboardId string,
) string {
return fmt.Sprintf("cloud-integration--%s--%s--%s", cloudProvider, svcId, dashboardId)
}
func (c *Controller) parseDashboardUuid(dashboardUuid string) (cloudProvider string, svcId string, dashboardId string, apiErr *model.ApiError) {
parts := strings.SplitN(dashboardUuid, "--", 4)
if len(parts) != 4 || parts[0] != "cloud-integration" {
return "", "", "", model.BadRequest(fmt.Errorf("invalid cloud integration dashboard id"))
}
return parts[1], parts[2], parts[3], nil
}
func (c *Controller) IsCloudIntegrationDashboardUuid(dashboardUuid string) bool {
_, _, _, apiErr := c.parseDashboardUuid(dashboardUuid)
return apiErr == nil
}

View File

@@ -0,0 +1,792 @@
package implawsprovider
import (
"context"
"fmt"
"log/slog"
"net/url"
"slices"
"sort"
"sync"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
integrationstore "github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/store"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"golang.org/x/exp/maps"
)
var (
CodeInvalidAWSRegion = errors.MustNewCode("invalid_aws_region")
CodeDashboardNotFound = errors.MustNewCode("dashboard_not_found")
)
type awsProvider struct {
logger *slog.Logger
querier querier.Querier
accountsRepo integrationstore.CloudProviderAccountsRepository
serviceConfigRepo integrationstore.ServiceConfigDatabase
awsServiceDefinitions *services.AWSServicesProvider
}
func NewAWSCloudProvider(
logger *slog.Logger,
accountsRepo integrationstore.CloudProviderAccountsRepository,
serviceConfigRepo integrationstore.ServiceConfigDatabase,
querier querier.Querier,
) integrationstypes.CloudProvider {
awsServiceDefinitions, err := services.NewAWSCloudProviderServices()
if err != nil {
panic("failed to initialize AWS service definitions: " + err.Error())
}
return &awsProvider{
logger: logger,
querier: querier,
accountsRepo: accountsRepo,
serviceConfigRepo: serviceConfigRepo,
awsServiceDefinitions: awsServiceDefinitions,
}
}
func (a *awsProvider) GetAccountStatus(ctx context.Context, orgID, accountID string) (*integrationstypes.GettableAccountStatus, error) {
accountRecord, err := a.accountsRepo.Get(ctx, orgID, a.GetName().String(), accountID)
if err != nil {
return nil, err
}
return &integrationstypes.GettableAccountStatus{
Id: accountRecord.ID.String(),
CloudAccountId: accountRecord.AccountID,
Status: accountRecord.Status(),
}, nil
}
func (a *awsProvider) ListConnectedAccounts(ctx context.Context, orgID string) (*integrationstypes.GettableConnectedAccountsList, error) {
accountRecords, err := a.accountsRepo.ListConnected(ctx, orgID, a.GetName().String())
if err != nil {
return nil, err
}
connectedAccounts := make([]*integrationstypes.Account, 0, len(accountRecords))
for _, r := range accountRecords {
connectedAccounts = append(connectedAccounts, r.Account(a.GetName()))
}
return &integrationstypes.GettableConnectedAccountsList{
Accounts: connectedAccounts,
}, nil
}
func (a *awsProvider) AgentCheckIn(ctx context.Context, req *integrationstypes.PostableAgentCheckInPayload) (any, error) {
// agent can't check in unless the account is already created
existingAccount, err := a.accountsRepo.Get(ctx, req.OrgID, a.GetName().String(), req.ID)
if err != nil {
return nil, err
}
if existingAccount != nil && existingAccount.AccountID != nil && *existingAccount.AccountID != req.AccountID {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "can't check in with new %s account id %s for account %s with existing %s id %s",
a.GetName().String(), req.AccountID, existingAccount.ID.StringValue(), a.GetName().String(),
*existingAccount.AccountID)
}
existingAccount, err = a.accountsRepo.GetConnectedCloudAccount(ctx, req.OrgID, a.GetName().String(), req.AccountID)
if existingAccount != nil && existingAccount.ID.StringValue() != req.ID {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput,
"can't check in to %s account %s with id %s. already connected with id %s",
a.GetName().String(), req.AccountID, req.ID, existingAccount.ID.StringValue())
}
agentReport := integrationstypes.AgentReport{
TimestampMillis: time.Now().UnixMilli(),
Data: req.Data,
}
account, err := a.accountsRepo.Upsert(
ctx, req.OrgID, a.GetName().String(), &req.ID, nil, &req.AccountID, &agentReport, nil,
)
if err != nil {
return nil, err
}
agentConfig, err := a.getAWSAgentConfig(ctx, account)
if err != nil {
return nil, err
}
return &integrationstypes.GettableAWSAgentCheckIn{
AccountId: account.ID.StringValue(),
CloudAccountId: *account.AccountID,
RemovedAt: account.RemovedAt,
IntegrationConfig: *agentConfig,
}, nil
}
func (a *awsProvider) getAWSAgentConfig(ctx context.Context, account *integrationstypes.CloudIntegration) (*integrationstypes.AWSAgentIntegrationConfig, error) {
// prepare and return integration config to be consumed by agent
agentConfig := &integrationstypes.AWSAgentIntegrationConfig{
EnabledRegions: []string{},
TelemetryCollectionStrategy: &integrationstypes.AWSCollectionStrategy{
Provider: a.GetName(),
AWSMetrics: &integrationstypes.AWSMetricsStrategy{},
AWSLogs: &integrationstypes.AWSLogsStrategy{},
S3Buckets: map[string][]string{},
},
}
accountConfig := new(integrationstypes.AWSAccountConfig)
err := accountConfig.Unmarshal([]byte(account.Config))
if err != nil {
return nil, err
}
if accountConfig != nil && accountConfig.EnabledRegions != nil {
agentConfig.EnabledRegions = accountConfig.EnabledRegions
}
svcConfigs, err := a.serviceConfigRepo.GetAllForAccount(
ctx, account.OrgID, account.ID.StringValue(),
)
if err != nil {
return nil, err
}
// accumulate config in a fixed order to ensure the same config is generated across runs
configuredServices := maps.Keys(svcConfigs)
slices.Sort(configuredServices)
for _, svcType := range configuredServices {
definition, err := a.awsServiceDefinitions.GetServiceDefinition(ctx, svcType)
if err != nil {
continue
}
config := svcConfigs[svcType]
serviceConfig := new(integrationstypes.AWSCloudServiceConfig)
err = serviceConfig.Unmarshal(config)
if err != nil {
continue
}
if serviceConfig.Logs != nil && serviceConfig.Logs.Enabled {
if svcType == integrationstypes.S3Sync {
// S3 bucket sync: no CloudWatch log subscriptions are appended for this service type,
// though the definition is populated with a custom CloudWatch group that helps in calculating the logs connection status.
agentConfig.TelemetryCollectionStrategy.S3Buckets = serviceConfig.Logs.S3Buckets
} else if definition.Strategy.AWSLogs != nil { // services that includes a logs subscription
agentConfig.TelemetryCollectionStrategy.AWSLogs.Subscriptions = append(
agentConfig.TelemetryCollectionStrategy.AWSLogs.Subscriptions,
definition.Strategy.AWSLogs.Subscriptions...,
)
}
}
if serviceConfig.Metrics != nil && serviceConfig.Metrics.Enabled && definition.Strategy.AWSMetrics != nil {
agentConfig.TelemetryCollectionStrategy.AWSMetrics.StreamFilters = append(
agentConfig.TelemetryCollectionStrategy.AWSMetrics.StreamFilters,
definition.Strategy.AWSMetrics.StreamFilters...,
)
}
}
return agentConfig, nil
}
func (a *awsProvider) GetName() integrationstypes.CloudProviderType {
return integrationstypes.CloudProviderAWS
}
func (a *awsProvider) ListServices(ctx context.Context, orgID string, cloudAccountID *string) (any, error) {
svcConfigs := make(map[string]*integrationstypes.AWSCloudServiceConfig)
if cloudAccountID != nil {
activeAccount, err := a.accountsRepo.GetConnectedCloudAccount(ctx, orgID, a.GetName().String(), *cloudAccountID)
if err != nil {
return nil, err
}
serviceConfigs, err := a.serviceConfigRepo.GetAllForAccount(ctx, orgID, activeAccount.ID.String())
if err != nil {
return nil, err
}
for svcType, config := range serviceConfigs {
serviceConfig := new(integrationstypes.AWSCloudServiceConfig)
err = serviceConfig.Unmarshal(config)
if err != nil {
return nil, err
}
svcConfigs[svcType] = serviceConfig
}
}
summaries := make([]integrationstypes.AWSServiceSummary, 0)
definitions, err := a.awsServiceDefinitions.ListServiceDefinitions(ctx)
if err != nil {
return nil, err
}
for _, def := range definitions {
summary := integrationstypes.AWSServiceSummary{
DefinitionMetadata: def.DefinitionMetadata,
Config: nil,
}
summary.Config = svcConfigs[summary.Id]
summaries = append(summaries, summary)
}
sort.Slice(summaries, func(i, j int) bool {
return summaries[i].DefinitionMetadata.Title < summaries[j].DefinitionMetadata.Title
})
return &integrationstypes.GettableAWSServices{
Services: summaries,
}, nil
}
func (a *awsProvider) GetServiceDetails(ctx context.Context, req *integrationstypes.GetServiceDetailsReq) (any, error) {
details := new(integrationstypes.GettableAWSServiceDetails)
awsDefinition, err := a.awsServiceDefinitions.GetServiceDefinition(ctx, req.ServiceId)
if err != nil {
return nil, err
}
details.AWSServiceDefinition = *awsDefinition
details.Strategy.Provider = a.GetName()
if req.CloudAccountID == nil {
return details, nil
}
config, err := a.getServiceConfig(ctx, &details.AWSServiceDefinition, req.OrgID, a.GetName().String(), req.ServiceId, *req.CloudAccountID)
if err != nil {
return nil, err
}
if config == nil {
return details, nil
}
details.Config = config
connectionStatus, err := a.getServiceConnectionStatus(
ctx,
*req.CloudAccountID,
req.OrgID,
&details.AWSServiceDefinition,
config,
)
if err != nil {
return nil, err
}
details.ConnectionStatus = connectionStatus
return details, nil
}
func (a *awsProvider) getServiceConnectionStatus(
ctx context.Context,
cloudAccountID string,
orgID valuer.UUID,
def *integrationstypes.AWSServiceDefinition,
serviceConfig *integrationstypes.AWSCloudServiceConfig,
) (*integrationstypes.ServiceConnectionStatus, error) {
if def.Strategy == nil {
return nil, nil
}
resp := new(integrationstypes.ServiceConnectionStatus)
wg := sync.WaitGroup{}
// only count goroutines that are actually launched, so wg.Wait cannot block forever
if def.Strategy.AWSMetrics != nil && serviceConfig.Metrics != nil && serviceConfig.Metrics.Enabled {
wg.Add(1)
go func() {
defer func() {
if r := recover(); r != nil {
a.logger.ErrorContext(
ctx, "panic while getting service metrics connection status",
"error", r,
"service", def.DefinitionMetadata.Id,
)
}
}()
defer wg.Done()
status, _ := a.getServiceMetricsConnectionStatus(ctx, cloudAccountID, orgID, def)
resp.Metrics = status
}()
}
if def.Strategy.AWSLogs != nil && serviceConfig.Logs != nil && serviceConfig.Logs.Enabled {
wg.Add(1)
go func() {
defer func() {
if r := recover(); r != nil {
a.logger.ErrorContext(
ctx, "panic while getting service logs connection status",
"error", r,
"service", def.DefinitionMetadata.Id,
)
}
}()
defer wg.Done()
status, _ := a.getServiceLogsConnectionStatus(ctx, cloudAccountID, orgID, def)
resp.Logs = status
}()
}
wg.Wait()
return resp, nil
}
func (a *awsProvider) getServiceMetricsConnectionStatus(
ctx context.Context,
cloudAccountID string,
orgID valuer.UUID,
def *integrationstypes.AWSServiceDefinition,
) ([]*integrationstypes.SignalConnectionStatus, error) {
if def.Strategy == nil ||
len(def.Strategy.AWSMetrics.StreamFilters) < 1 ||
len(def.DataCollected.Metrics) < 1 {
return nil, nil
}
statusResp := make([]*integrationstypes.SignalConnectionStatus, 0)
for _, category := range def.IngestionStatusCheck.Metrics {
queries := make([]qbtypes.QueryEnvelope, 0)
for _, check := range category.Checks {
filterExpression := fmt.Sprintf(`cloud.provider="aws" AND cloud.account.id="%s"`, cloudAccountID)
f := ""
for _, attribute := range check.Attributes {
f = fmt.Sprintf("%s %s", attribute.Name, attribute.Operator)
if attribute.Value != "" {
f = fmt.Sprintf("%s '%s'", f, attribute.Value)
}
filterExpression = fmt.Sprintf("%s AND %s", filterExpression, f)
}
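// For illustration: with a single check attribute such as
// {Name: "Namespace", Operator: "=", Value: "AWS/RDS"} (hypothetical values),
// filterExpression becomes:
//   cloud.provider="aws" AND cloud.account.id="<cloud-account-id>" AND Namespace = 'AWS/RDS'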
queries = append(queries, qbtypes.QueryEnvelope{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: valuer.GenerateUUID().String(),
Signal: telemetrytypes.SignalMetrics,
Aggregations: []qbtypes.MetricAggregation{{
MetricName: check.Key,
TimeAggregation: metrictypes.TimeAggregationAvg,
SpaceAggregation: metrictypes.SpaceAggregationAvg,
}},
Filter: &qbtypes.Filter{
Expression: filterExpression,
},
},
})
}
resp, err := a.querier.QueryRange(ctx, orgID, &qbtypes.QueryRangeRequest{
SchemaVersion: "v5",
Start: uint64(time.Now().Add(-time.Hour).UnixMilli()),
End: uint64(time.Now().UnixMilli()),
RequestType: qbtypes.RequestTypeScalar,
CompositeQuery: qbtypes.CompositeQuery{
Queries: queries,
},
})
if err != nil {
a.logger.DebugContext(ctx,
"error querying for service metrics connection status",
"error", err,
"service", def.DefinitionMetadata.Id,
)
continue
}
if resp == nil || len(resp.Data.Results) < 1 {
continue
}
queryResponse, ok := resp.Data.Results[0].(*qbtypes.TimeSeriesData)
if !ok {
continue
}
if queryResponse == nil ||
len(queryResponse.Aggregations) < 1 ||
len(queryResponse.Aggregations[0].Series) < 1 ||
len(queryResponse.Aggregations[0].Series[0].Values) < 1 {
continue
}
statusResp = append(statusResp, &integrationstypes.SignalConnectionStatus{
CategoryID: category.Category,
CategoryDisplayName: category.DisplayName,
LastReceivedTsMillis: queryResponse.Aggregations[0].Series[0].Values[0].Timestamp,
LastReceivedFrom: "signoz-aws-integration",
})
}
return statusResp, nil
}
func (a *awsProvider) getServiceLogsConnectionStatus(
ctx context.Context,
cloudAccountID string,
orgID valuer.UUID,
def *integrationstypes.AWSServiceDefinition,
) ([]*integrationstypes.SignalConnectionStatus, error) {
if def.Strategy == nil ||
len(def.Strategy.AWSLogs.Subscriptions) < 1 ||
len(def.DataCollected.Logs) < 1 {
return nil, nil
}
statusResp := make([]*integrationstypes.SignalConnectionStatus, 0)
for _, category := range def.IngestionStatusCheck.Logs {
queries := make([]qbtypes.QueryEnvelope, 0)
for _, check := range category.Checks {
filterExpression := fmt.Sprintf(`cloud.account.id="%s"`, cloudAccountID)
f := ""
for _, attribute := range check.Attributes {
f = fmt.Sprintf("%s %s", attribute.Name, attribute.Operator)
if attribute.Value != "" {
f = fmt.Sprintf("%s '%s'", f, attribute.Value)
}
filterExpression = fmt.Sprintf("%s AND %s", filterExpression, f)
}
queries = append(queries, qbtypes.QueryEnvelope{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Name: valuer.GenerateUUID().String(),
Signal: telemetrytypes.SignalLogs,
Aggregations: []qbtypes.LogAggregation{{
Expression: "count()",
}},
Filter: &qbtypes.Filter{
Expression: filterExpression,
},
Limit: 10,
Offset: 0,
},
})
}
resp, err := a.querier.QueryRange(ctx, orgID, &qbtypes.QueryRangeRequest{
SchemaVersion: "v1",
Start: uint64(time.Now().Add(-time.Hour * 1).UnixMilli()),
End: uint64(time.Now().UnixMilli()),
RequestType: qbtypes.RequestTypeTimeSeries,
CompositeQuery: qbtypes.CompositeQuery{
Queries: queries,
},
})
if err != nil {
a.logger.DebugContext(ctx,
"error querying for service logs connection status",
"error", err,
"service", def.DefinitionMetadata.Id,
)
continue
}
if resp == nil || len(resp.Data.Results) < 1 {
continue
}
queryResponse, ok := resp.Data.Results[0].(*qbtypes.TimeSeriesData)
if !ok {
continue
}
if queryResponse == nil ||
len(queryResponse.Aggregations) < 1 ||
len(queryResponse.Aggregations[0].Series) < 1 ||
len(queryResponse.Aggregations[0].Series[0].Values) < 1 {
continue
}
statusResp = append(statusResp, &integrationstypes.SignalConnectionStatus{
CategoryID: category.Category,
CategoryDisplayName: category.DisplayName,
LastReceivedTsMillis: queryResponse.Aggregations[0].Series[0].Values[0].Timestamp,
LastReceivedFrom: "signoz-aws-integration",
})
}
return statusResp, nil
}
func (a *awsProvider) getServiceConfig(ctx context.Context,
def *integrationstypes.AWSServiceDefinition, orgID valuer.UUID, cloudProvider, serviceId, cloudAccountId string,
) (*integrationstypes.AWSCloudServiceConfig, error) {
activeAccount, err := a.accountsRepo.GetConnectedCloudAccount(ctx, orgID.String(), cloudProvider, cloudAccountId)
if err != nil {
return nil, err
}
config, err := a.serviceConfigRepo.Get(ctx, orgID.String(), activeAccount.ID.StringValue(), serviceId)
if err != nil {
if errors.Ast(err, errors.TypeNotFound) {
return nil, nil
}
return nil, err
}
serviceConfig := new(integrationstypes.AWSCloudServiceConfig)
err = serviceConfig.Unmarshal(config)
if err != nil {
return nil, err
}
if config != nil && serviceConfig.Metrics != nil && serviceConfig.Metrics.Enabled {
def.PopulateDashboardURLs(serviceId)
}
return serviceConfig, nil
}
func (a *awsProvider) GetAvailableDashboards(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error) {
accountRecords, err := a.accountsRepo.ListConnected(ctx, orgID.StringValue(), a.GetName().String())
if err != nil {
return nil, err
}
// for now service dashboards are only available when metrics are enabled.
servicesWithAvailableMetrics := map[string]*time.Time{}
for _, ar := range accountRecords {
if ar.AccountID != nil {
configsBySvcId, err := a.serviceConfigRepo.GetAllForAccount(ctx, orgID.StringValue(), ar.ID.StringValue())
if err != nil {
return nil, err
}
for svcId, config := range configsBySvcId {
serviceConfig := new(integrationstypes.AWSCloudServiceConfig)
err = serviceConfig.Unmarshal(config)
if err != nil {
return nil, err
}
if serviceConfig.Metrics != nil && serviceConfig.Metrics.Enabled {
servicesWithAvailableMetrics[svcId] = &ar.CreatedAt
}
}
}
}
svcDashboards := make([]*dashboardtypes.Dashboard, 0)
allServices, err := a.awsServiceDefinitions.ListServiceDefinitions(ctx)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to list aws service definitions")
}
for _, svc := range allServices {
serviceDashboardsCreatedAt, ok := servicesWithAvailableMetrics[svc.Id]
if ok {
svcDashboards = append(svcDashboards, integrationstypes.GetDashboardsFromAssets(svc.Id, a.GetName(), serviceDashboardsCreatedAt, svc.Assets)...)
servicesWithAvailableMetrics[svc.Id] = nil
}
}
return svcDashboards, nil
}
func (a *awsProvider) GetDashboard(ctx context.Context, req *integrationstypes.GettableDashboard) (*dashboardtypes.Dashboard, error) {
allDashboards, err := a.GetAvailableDashboards(ctx, req.OrgID)
if err != nil {
return nil, err
}
for _, d := range allDashboards {
if d.ID == req.ID {
return d, nil
}
}
return nil, errors.NewNotFoundf(CodeDashboardNotFound, "dashboard with id %s not found", req.ID)
}
func (a *awsProvider) GenerateConnectionArtifact(ctx context.Context, req *integrationstypes.PostableConnectionArtifact) (any, error) {
connection := new(integrationstypes.PostableAWSConnectionUrl)
err := connection.Unmarshal(req.Data)
if err != nil {
return nil, err
}
if connection.AccountConfig != nil {
for _, region := range connection.AccountConfig.EnabledRegions {
if integrationstypes.ValidAWSRegions[region] {
continue
}
return nil, errors.NewInvalidInputf(CodeInvalidAWSRegion, "invalid aws region: %s", region)
}
}
config, err := connection.AccountConfig.Marshal()
if err != nil {
return nil, err
}
account, err := a.accountsRepo.Upsert(
ctx, req.OrgID, integrationstypes.CloudProviderAWS.String(), nil, config,
nil, nil, nil,
)
if err != nil {
return nil, err
}
agentVersion := "v0.0.8"
if connection.AgentConfig.Version != "" {
agentVersion = connection.AgentConfig.Version
}
baseURL := fmt.Sprintf("https://%s.console.aws.amazon.com/cloudformation/home",
connection.AgentConfig.Region)
u, _ := url.Parse(baseURL)
q := u.Query()
q.Set("region", connection.AgentConfig.Region)
u.Fragment = "/stacks/quickcreate"
u.RawQuery = q.Encode()
q = u.Query()
q.Set("stackName", "signoz-integration")
q.Set("templateURL", fmt.Sprintf("https://signoz-integrations.s3.us-east-1.amazonaws.com/aws-quickcreate-template-%s.json", agentVersion))
q.Set("param_SigNozIntegrationAgentVersion", agentVersion)
q.Set("param_SigNozApiUrl", connection.AgentConfig.SigNozAPIUrl)
q.Set("param_SigNozApiKey", connection.AgentConfig.SigNozAPIKey)
q.Set("param_SigNozAccountId", account.ID.StringValue())
q.Set("param_IngestionUrl", connection.AgentConfig.IngestionUrl)
q.Set("param_IngestionKey", connection.AgentConfig.IngestionKey)
return &integrationstypes.GettableAWSConnectionUrl{
AccountId: account.ID.StringValue(),
ConnectionUrl: u.String() + "?&" + q.Encode(), // this format is required by AWS
}, nil
}
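The ConnectionUrl returned above roughly follows the CloudFormation quick-create format; an illustrative example with placeholder values (remaining param_* values elided, query values URL-encoded by url.Values.Encode):
https://<region>.console.aws.amazon.com/cloudformation/home?region=<region>#/stacks/quickcreate?&param_SigNozAccountId=<generated-account-uuid>&stackName=signoz-integration&templateURL=<encoded URL of aws-quickcreate-template-v0.0.8.json>&...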
func (a *awsProvider) UpdateServiceConfig(ctx context.Context, req *integrationstypes.PatchableServiceConfig) (any, error) {
definition, err := a.awsServiceDefinitions.GetServiceDefinition(ctx, req.ServiceId)
if err != nil {
return nil, err
}
serviceConfig := new(integrationstypes.PatchableAWSCloudServiceConfig)
err = serviceConfig.Unmarshal(req.Config)
if err != nil {
return nil, err
}
if err = serviceConfig.Config.Validate(definition); err != nil {
return nil, err
}
// can only update config for a connected cloud account id
_, err = a.accountsRepo.GetConnectedCloudAccount(
ctx, req.OrgID, a.GetName().String(), serviceConfig.CloudAccountId,
)
if err != nil {
return nil, err
}
serviceConfigBytes, err := serviceConfig.Config.Marshal()
if err != nil {
return nil, err
}
updatedConfig, err := a.serviceConfigRepo.Upsert(
ctx, req.OrgID, a.GetName().String(), serviceConfig.CloudAccountId, req.ServiceId, serviceConfigBytes,
)
if err != nil {
return nil, err
}
if err = serviceConfig.Config.Unmarshal(updatedConfig); err != nil {
return nil, err
}
return &integrationstypes.PatchServiceConfigResponse{
ServiceId: req.ServiceId,
Config: serviceConfig.Config,
}, nil
}
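A hedged call sketch for the update flow above; provider, orgID, reqBody, and the "lambda" service id are placeholders, and Config carries the raw JSON body forwarded by the HTTP handler:
resp, err := provider.UpdateServiceConfig(ctx, &integrationstypes.PatchableServiceConfig{
OrgID: orgID, // organization id from the request claims
ServiceId: "lambda", // hypothetical service id
Config: reqBody, // raw JSON bytes, unmarshalled into PatchableAWSCloudServiceConfig above
})
if err != nil {
// validation failures and unknown/unconnected accounts surface here
}
_ = resp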
func (a *awsProvider) UpdateAccountConfig(ctx context.Context, req *integrationstypes.PatchableAccountConfig) (any, error) {
config := new(integrationstypes.PatchableAWSAccountConfig)
err := config.Unmarshal(req.Data)
if err != nil {
return nil, err
}
if config.Config == nil {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "account config can't be null")
}
for _, region := range config.Config.EnabledRegions {
if integrationstypes.ValidAWSRegions[region] {
continue
}
return nil, errors.NewInvalidInputf(CodeInvalidAWSRegion, "invalid aws region: %s", region)
}
configBytes, err := config.Config.Marshal()
if err != nil {
return nil, err
}
// account must exist to update config, but it doesn't need to be connected
_, err = a.accountsRepo.Get(ctx, req.OrgID, a.GetName().String(), req.AccountId)
if err != nil {
return nil, err
}
accountRecord, err := a.accountsRepo.Upsert(
ctx, req.OrgID, a.GetName().String(), &req.AccountId, configBytes, nil, nil, nil,
)
if err != nil {
return nil, err
}
return accountRecord.Account(a.GetName()), nil
}
func (a *awsProvider) DisconnectAccount(ctx context.Context, orgID, accountID string) (*integrationstypes.CloudIntegration, error) {
account, err := a.accountsRepo.Get(ctx, orgID, a.GetName().String(), accountID)
if err != nil {
return nil, err
}
tsNow := time.Now()
account, err = a.accountsRepo.Upsert(
ctx, orgID, a.GetName().String(), &accountID, nil, nil, nil, &tsNow,
)
if err != nil {
return nil, err
}
return account, nil
}

View File

@@ -1,94 +1 @@
package cloudintegrations
import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
"github.com/SigNoz/signoz/pkg/types"
)
type ServiceSummary struct {
services.Metadata
Config *types.CloudServiceConfig `json:"config"`
}
type ServiceDetails struct {
services.Definition
Config *types.CloudServiceConfig `json:"config"`
ConnectionStatus *ServiceConnectionStatus `json:"status,omitempty"`
}
type AccountStatus struct {
Integration AccountIntegrationStatus `json:"integration"`
}
type AccountIntegrationStatus struct {
LastHeartbeatTsMillis *int64 `json:"last_heartbeat_ts_ms"`
}
type LogsConfig struct {
Enabled bool `json:"enabled"`
S3Buckets map[string][]string `json:"s3_buckets,omitempty"`
}
type MetricsConfig struct {
Enabled bool `json:"enabled"`
}
type ServiceConnectionStatus struct {
Logs *SignalConnectionStatus `json:"logs"`
Metrics *SignalConnectionStatus `json:"metrics"`
}
type SignalConnectionStatus struct {
LastReceivedTsMillis int64 `json:"last_received_ts_ms"` // epoch milliseconds
LastReceivedFrom string `json:"last_received_from"` // resource identifier
}
type CompiledCollectionStrategy = services.CollectionStrategy
func NewCompiledCollectionStrategy(provider string) (*CompiledCollectionStrategy, error) {
if provider == "aws" {
return &CompiledCollectionStrategy{
Provider: "aws",
AWSMetrics: &services.AWSMetricsStrategy{},
AWSLogs: &services.AWSLogsStrategy{},
}, nil
}
return nil, errors.NewNotFoundf(services.CodeUnsupportedCloudProvider, "unsupported cloud provider: %s", provider)
}
// Helper for accumulating strategies for enabled services.
func AddServiceStrategy(serviceType string, cs *CompiledCollectionStrategy,
definitionStrat *services.CollectionStrategy, config *types.CloudServiceConfig) error {
if definitionStrat.Provider != cs.Provider {
return errors.NewInternalf(CodeMismatchCloudProvider, "can't add %s service strategy to compiled strategy for %s",
definitionStrat.Provider, cs.Provider)
}
if cs.Provider == "aws" {
if config.Logs != nil && config.Logs.Enabled {
if serviceType == services.S3Sync {
// S3 bucket sync: no CloudWatch logs subscriptions are appended for this service type,
// though the definition is populated with a custom CloudWatch log group that helps in calculating the logs connection status
cs.S3Buckets = config.Logs.S3Buckets
} else if definitionStrat.AWSLogs != nil { // services that include a logs subscription
cs.AWSLogs.Subscriptions = append(
cs.AWSLogs.Subscriptions,
definitionStrat.AWSLogs.Subscriptions...,
)
}
}
if config.Metrics != nil && config.Metrics.Enabled && definitionStrat.AWSMetrics != nil {
cs.AWSMetrics.StreamFilters = append(
cs.AWSMetrics.StreamFilters,
definitionStrat.AWSMetrics.StreamFilters...,
)
}
return nil
}
return errors.NewNotFoundf(services.CodeUnsupportedCloudProvider, "unsupported cloud provider: %s", cs.Provider)
}

View File

@@ -0,0 +1,27 @@
package cloudintegrations
import (
"log/slog"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/implawsprovider"
integrationstore "github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/store"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
)
func NewCloudProviderRegistry(
logger *slog.Logger,
store sqlstore.SQLStore,
querier querier.Querier,
) map[integrationstypes.CloudProviderType]integrationstypes.CloudProvider {
registry := make(map[integrationstypes.CloudProviderType]integrationstypes.CloudProvider)
accountsRepo := integrationstore.NewCloudProviderAccountsRepository(store)
serviceConfigRepo := integrationstore.NewServiceConfigRepository(store)
awsProviderImpl := implawsprovider.NewAWSCloudProvider(logger, accountsRepo, serviceConfigRepo, querier)
registry[integrationstypes.CloudProviderAWS] = awsProviderImpl
return registry
}
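A minimal lookup sketch for the registry above (the ok check is an extra safety assumption; the handlers in this diff index the map directly; logger, sqlStore, q, and orgID are placeholders):
registry := NewCloudProviderRegistry(logger, sqlStore, q)
provider, ok := registry[integrationstypes.CloudProviderAWS]
if !ok {
// provider not registered
}
accounts, err := provider.ListConnectedAccounts(ctx, orgID) // orgID: organization id string from claims
_ = accounts
_ = err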

View File

@@ -7,6 +7,24 @@
"metrics": true,
"logs": false
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_ApplicationELB_ConsumedLCUs_count",
"attributes": []
},
{
"key": "aws_ApplicationELB_ProcessedBytes_sum",
"attributes": []
}
]
}
]
},
"data_collected": {
"metrics": [
{

View File

@@ -7,6 +7,75 @@
"metrics": true,
"logs": true
},
"ingestion_status_check": {
"metrics": [
{
"category": "rest_api",
"display_name": "REST API Metrics",
"checks": [
{
"key": "aws_ApiGateway_Count_count",
"attributes": [
{
"name": "ApiName",
"operator": "EXISTS",
"value": ""
}
]
}
]
},
{
"category": "http_api",
"display_name": "HTTP API Metrics",
"checks": [
{
"key": "aws_ApiGateway_Count_count",
"attributes": [
{
"name": "ApiId",
"operator": "EXISTS",
"value": ""
}
]
}
]
},
{
"category": "websocket_api",
"display_name": "Websocket API Metrics",
"checks": [
{
"key": "aws_ApiGateway_Count_count",
"attributes": [
{
"name": "ApiId",
"operator": "EXISTS",
"value": ""
}
]
}
]
}
],
"logs": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"attributes": [
{
"name": "aws.cloudwatch.log_group_name",
"operator": "ILIKE",
"value": "API-Gateway%"
}
]
}
]
}
]
},
"data_collected": {
"metrics": [
{
@@ -148,6 +217,146 @@
"name": "aws_ApiGateway_Latency_sum",
"unit": "Milliseconds",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_4xx_sum",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_4xx_max",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_4xx_min",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_4xx_count",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_5xx_sum",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_5xx_max",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_5xx_min",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_5xx_count",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_DataProcessed_sum",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_DataProcessed_max",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_DataProcessed_min",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_DataProcessed_count",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ExecutionError_sum",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ExecutionError_max",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ExecutionError_min",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ExecutionError_count",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ClientError_sum",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ClientError_max",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ClientError_min",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ClientError_count",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_IntegrationError_sum",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_IntegrationError_max",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_IntegrationError_min",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_IntegrationError_count",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ConnectCount_sum",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ConnectCount_max",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ConnectCount_min",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ConnectCount_count",
"unit": "Count",
"type": "Gauge"
}
],
"logs": [

View File

@@ -7,6 +7,24 @@
"metrics": true,
"logs": false
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_DynamoDB_AccountMaxReads_max",
"attributes": []
},
{
"key": "aws_DynamoDB_AccountProvisionedReadCapacityUtilization_max",
"attributes": []
}
]
}
]
},
"data_collected": {
"metrics": [
{
@@ -391,4 +409,4 @@
}
]
}
}
}

View File

@@ -7,6 +7,24 @@
"metrics": true,
"logs": false
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_EC2_CPUUtilization_max",
"attributes": []
},
{
"key": "aws_EC2_NetworkIn_max",
"attributes": []
}
]
}
]
},
"data_collected": {
"metrics": [
{
@@ -515,4 +533,4 @@
}
]
}
}
}

View File

@@ -7,6 +7,81 @@
"metrics": true,
"logs": true
},
"ingestion_status_check": {
"metrics": [
{
"category": "overview",
"display_name": "Overview",
"checks": [
{
"key": "aws_ECS_CPUUtilization_max",
"attributes": []
},
{
"key": "aws_ECS_MemoryUtilization_max",
"attributes": []
}
]
},
{
"category": "containerinsights",
"display_name": "Container Insights",
"checks": [
{
"key": "aws_ECS_ContainerInsights_NetworkRxBytes_max",
"attributes": []
},
{
"key": "aws_ECS_ContainerInsights_StorageReadBytes_max",
"attributes": []
}
]
},
{
"category": "enhanced_containerinsights",
"display_name": "Enhanced Container Insights",
"checks": [
{
"key": "aws_ECS_ContainerInsights_ContainerCpuUtilization_max",
"attributes": [
{
"name": "TaskId",
"operator": "EXISTS",
"value": ""
}
]
},
{
"key": "aws_ECS_ContainerInsights_TaskMemoryUtilization_max",
"attributes": [
{
"name": "TaskId",
"operator": "EXISTS",
"value": ""
}
]
}
]
}
],
"logs": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"attributes": [
{
"name": "aws.cloudwatch.log_group_name",
"operator": "ILIKE",
"value": "%/ecs/%"
}
]
}
]
}
]
},
"data_collected": {
"metrics": [
{

View File

@@ -7,6 +7,20 @@
"metrics": true,
"logs": false
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_ElastiCache_CacheHitRate_max",
"attributes": []
}
]
}
]
},
"data_collected": {
"metrics":[
{
@@ -1928,7 +1942,7 @@
"unit": "Percent",
"type": "Gauge",
"description": ""
}
}
]
},
"telemetry_collection_strategy": {
@@ -1951,4 +1965,4 @@
}
]
}
}
}

View File

@@ -7,6 +7,37 @@
"metrics": true,
"logs": true
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_Lambda_Invocations_sum",
"attributes": []
}
]
}
],
"logs": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"attributes": [
{
"name": "aws.cloudwatch.log_group_name",
"operator": "ILIKE",
"value": "/aws/lambda%"
}
]
}
]
}
]
},
"data_collected": {
"metrics": [
{

View File

@@ -7,6 +7,20 @@
"metrics": true,
"logs": false
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_Kafka_KafkaDataLogsDiskUsed_max",
"attributes": []
}
]
}
]
},
"data_collected": {
"metrics": [
{
@@ -1088,4 +1102,3 @@
]
}
}

View File

@@ -7,6 +7,37 @@
"metrics": true,
"logs": true
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_RDS_CPUUtilization_max",
"attributes": []
}
]
}
],
"logs": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"attributes": [
{
"name": "resources.aws.cloudwatch.log_group_name",
"operator": "ILIKE",
"value": "/aws/rds%"
}
]
}
]
}
]
},
"data_collected": {
"metrics": [
{
@@ -800,4 +831,4 @@
}
]
}
}
}

View File

@@ -7,6 +7,20 @@
"metrics": true,
"logs": false
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_SNS_NumberOfMessagesPublished_sum",
"attributes": []
}
]
}
]
},
"data_collected": {
"metrics": [
{
@@ -127,4 +141,4 @@
}
]
}
}
}

View File

@@ -7,6 +7,24 @@
"metrics": true,
"logs": false
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_SQS_SentMessageSize_max",
"attributes": []
},
{
"key": "aws_SQS_NumberOfMessagesSent_sum",
"attributes": []
}
]
}
]
},
"data_collected": {
"metrics": [
{
@@ -247,4 +265,4 @@
}
]
}
}
}

View File

@@ -1,91 +0,0 @@
package services
import (
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
)
type Metadata struct {
Id string `json:"id"`
Title string `json:"title"`
Icon string `json:"icon"`
}
type Definition struct {
Metadata
Overview string `json:"overview"` // markdown
Assets Assets `json:"assets"`
SupportedSignals SupportedSignals `json:"supported_signals"`
DataCollected DataCollected `json:"data_collected"`
Strategy *CollectionStrategy `json:"telemetry_collection_strategy"`
}
type Assets struct {
Dashboards []Dashboard `json:"dashboards"`
}
type SupportedSignals struct {
Logs bool `json:"logs"`
Metrics bool `json:"metrics"`
}
type DataCollected struct {
Logs []CollectedLogAttribute `json:"logs"`
Metrics []CollectedMetric `json:"metrics"`
}
type CollectedLogAttribute struct {
Name string `json:"name"`
Path string `json:"path"`
Type string `json:"type"`
}
type CollectedMetric struct {
Name string `json:"name"`
Type string `json:"type"`
Unit string `json:"unit"`
Description string `json:"description"`
}
type CollectionStrategy struct {
Provider string `json:"provider"`
AWSMetrics *AWSMetricsStrategy `json:"aws_metrics,omitempty"`
AWSLogs *AWSLogsStrategy `json:"aws_logs,omitempty"`
S3Buckets map[string][]string `json:"s3_buckets,omitempty"` // Only available in S3 Sync Service Type
}
type AWSMetricsStrategy struct {
// to be used as https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-metricstream.html#cfn-cloudwatch-metricstream-includefilters
StreamFilters []struct {
// json tags here are in the shape expected by AWS API as detailed at
// https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-metricstream-metricstreamfilter.html
Namespace string `json:"Namespace"`
MetricNames []string `json:"MetricNames,omitempty"`
} `json:"cloudwatch_metric_stream_filters"`
}
type AWSLogsStrategy struct {
Subscriptions []struct {
// subscribe to all logs groups with specified prefix.
// eg: `/aws/rds/`
LogGroupNamePrefix string `json:"log_group_name_prefix"`
// https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/FilterAndPatternSyntax.html
// "" implies no filtering is required.
FilterPattern string `json:"filter_pattern"`
} `json:"cloudwatch_logs_subscriptions"`
}
type Dashboard struct {
Id string `json:"id"`
Url string `json:"url"`
Title string `json:"title"`
Description string `json:"description"`
Image string `json:"image"`
Definition *dashboardtypes.StorableDashboardData `json:"definition,omitempty"`
}

View File

@@ -2,128 +2,90 @@ package services
import (
"bytes"
"context"
"embed"
"encoding/json"
"fmt"
"io/fs"
"path"
"sort"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/valuer"
koanfJson "github.com/knadh/koanf/parsers/json"
"golang.org/x/exp/maps"
)
const (
S3Sync = "s3sync"
)
var (
CodeUnsupportedCloudProvider = errors.MustNewCode("unsupported_cloud_provider")
CodeUnsupportedServiceType = errors.MustNewCode("unsupported_service_type")
CodeServiceDefinitionNotFound = errors.MustNewCode("service_definition_not_found")
)
func List(cloudProvider string) ([]Definition, *model.ApiError) {
cloudServices, found := supportedServices[cloudProvider]
if !found || cloudServices == nil {
return nil, model.NotFoundError(fmt.Errorf(
"unsupported cloud provider: %s", cloudProvider,
))
type (
AWSServicesProvider struct {
definitions map[string]*integrationstypes.AWSServiceDefinition
}
)
services := maps.Values(cloudServices)
sort.Slice(services, func(i, j int) bool {
return services[i].Id < services[j].Id
})
return services, nil
func (a *AWSServicesProvider) ListServiceDefinitions(ctx context.Context) (map[string]*integrationstypes.AWSServiceDefinition, error) {
return a.definitions, nil
}
func Map(cloudProvider string) (map[string]Definition, error) {
cloudServices, found := supportedServices[cloudProvider]
if !found || cloudServices == nil {
return nil, errors.Newf(errors.TypeNotFound, CodeUnsupportedCloudProvider, "unsupported cloud provider: %s", cloudProvider)
func (a *AWSServicesProvider) GetServiceDefinition(ctx context.Context, serviceName string) (*integrationstypes.AWSServiceDefinition, error) {
def, ok := a.definitions[serviceName]
if !ok {
return nil, errors.NewNotFoundf(CodeServiceDefinitionNotFound, "aws service definition not found: %s", serviceName)
}
return cloudServices, nil
return def, nil
}
func GetServiceDefinition(cloudProvider, serviceType string) (*Definition, error) {
cloudServices := supportedServices[cloudProvider]
if cloudServices == nil {
return nil, errors.Newf(errors.TypeNotFound, CodeUnsupportedCloudProvider, "unsupported cloud provider: %s", cloudProvider)
}
svc, exists := cloudServices[serviceType]
if !exists {
return nil, errors.Newf(errors.TypeNotFound, CodeUnsupportedServiceType, "%s service not found: %s", cloudProvider, serviceType)
}
return &svc, nil
}
// End of API. Logic for reading service definition files follows
// Service details read from ./serviceDefinitions
// { "providerName": { "service_id": {...}} }
var supportedServices map[string]map[string]Definition
func init() {
err := readAllServiceDefinitions()
func NewAWSCloudProviderServices() (*AWSServicesProvider, error) {
definitions, err := readAllServiceDefinitions(integrationstypes.CloudProviderAWS)
if err != nil {
panic(fmt.Errorf(
"couldn't read cloud service definitions: %w", err,
))
return nil, err
}
serviceDefinitions := make(map[string]*integrationstypes.AWSServiceDefinition)
for id, def := range definitions {
typedDef, ok := def.(*integrationstypes.AWSServiceDefinition)
if !ok {
return nil, fmt.Errorf("invalid type for AWS service definition %s", id)
}
serviceDefinitions[id] = typedDef
}
return &AWSServicesProvider{
definitions: serviceDefinitions,
}, nil
}
//go:embed definitions/*
var definitionFiles embed.FS
func readAllServiceDefinitions() error {
supportedServices = map[string]map[string]Definition{}
func readAllServiceDefinitions(cloudProvider valuer.String) (map[string]any, error) {
rootDirName := "definitions"
cloudProviderDirs, err := fs.ReadDir(definitionFiles, rootDirName)
cloudProviderDirPath := path.Join(rootDirName, cloudProvider.String())
cloudServices, err := readServiceDefinitionsFromDir(cloudProvider, cloudProviderDirPath)
if err != nil {
return fmt.Errorf("couldn't read dirs in %s: %w", rootDirName, err)
return nil, err
}
for _, d := range cloudProviderDirs {
if !d.IsDir() {
continue
}
cloudProvider := d.Name()
cloudProviderDirPath := path.Join(rootDirName, cloudProvider)
cloudServices, err := readServiceDefinitionsFromDir(cloudProvider, cloudProviderDirPath)
if err != nil {
return fmt.Errorf("couldn't read %s service definitions: %w", cloudProvider, err)
}
if len(cloudServices) < 1 {
return fmt.Errorf("no %s services could be read", cloudProvider)
}
supportedServices[cloudProvider] = cloudServices
if len(cloudServices) < 1 {
return nil, errors.NewInternalf(errors.CodeInternal, "no service definitions found in %s", cloudProviderDirPath)
}
return nil
return cloudServices, nil
}
func readServiceDefinitionsFromDir(cloudProvider string, cloudProviderDirPath string) (
map[string]Definition, error,
) {
func readServiceDefinitionsFromDir(cloudProvider valuer.String, cloudProviderDirPath string) (map[string]any, error) {
svcDefDirs, err := fs.ReadDir(definitionFiles, cloudProviderDirPath)
if err != nil {
return nil, fmt.Errorf("couldn't list integrations dirs: %w", err)
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't list integrations dirs")
}
svcDefs := map[string]Definition{}
svcDefs := make(map[string]any)
for _, d := range svcDefDirs {
if !d.IsDir() {
@@ -133,103 +95,71 @@ func readServiceDefinitionsFromDir(cloudProvider string, cloudProviderDirPath st
svcDirPath := path.Join(cloudProviderDirPath, d.Name())
s, err := readServiceDefinition(cloudProvider, svcDirPath)
if err != nil {
return nil, fmt.Errorf("couldn't read svc definition for %s: %w", d.Name(), err)
return nil, err
}
_, exists := svcDefs[s.Id]
_, exists := svcDefs[s.GetId()]
if exists {
return nil, fmt.Errorf(
"duplicate service definition for id %s at %s", s.Id, d.Name(),
)
return nil, errors.NewInternalf(errors.CodeInternal, "duplicate service definition for id %s at %s", s.GetId(), d.Name())
}
svcDefs[s.Id] = *s
svcDefs[s.GetId()] = s
}
return svcDefs, nil
}
func readServiceDefinition(cloudProvider string, svcDirpath string) (*Definition, error) {
func readServiceDefinition(cloudProvider valuer.String, svcDirpath string) (integrationstypes.Definition, error) {
integrationJsonPath := path.Join(svcDirpath, "integration.json")
serializedSpec, err := definitionFiles.ReadFile(integrationJsonPath)
if err != nil {
return nil, fmt.Errorf(
"couldn't find integration.json in %s: %w",
svcDirpath, err,
)
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't read integration definition in %s", svcDirpath)
}
integrationSpec, err := koanfJson.Parser().Unmarshal(serializedSpec)
if err != nil {
return nil, fmt.Errorf(
"couldn't parse integration.json from %s: %w",
integrationJsonPath, err,
)
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't parse integration definition in %s", svcDirpath)
}
hydrated, err := integrations.HydrateFileUris(
integrationSpec, definitionFiles, svcDirpath,
)
hydrated, err := integrations.HydrateFileUris(integrationSpec, definitionFiles, svcDirpath)
if err != nil {
return nil, fmt.Errorf(
"couldn't hydrate files referenced in service definition %s: %w",
integrationJsonPath, err,
)
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't hydrate integration definition in %s", svcDirpath)
}
hydratedSpec := hydrated.(map[string]any)
serviceDef, err := ParseStructWithJsonTagsFromMap[Definition](hydratedSpec)
if err != nil {
return nil, fmt.Errorf(
"couldn't parse hydrated JSON spec read from %s: %w",
integrationJsonPath, err,
)
var serviceDef integrationstypes.Definition
switch cloudProvider {
case integrationstypes.CloudProviderAWS:
serviceDef = &integrationstypes.AWSServiceDefinition{}
default:
// ideally this shouldn't happen for known providers, hence an internal error is returned
return nil, errors.NewInternalf(errors.CodeInternal, "unsupported cloud provider: %s", cloudProvider)
}
err = validateServiceDefinition(serviceDef)
err = parseStructWithJsonTagsFromMap(hydratedSpec, serviceDef)
if err != nil {
return nil, fmt.Errorf("invalid service definition %s: %w", serviceDef.Id, err)
return nil, err
}
err = serviceDef.Validate()
if err != nil {
return nil, err
}
serviceDef.Strategy.Provider = cloudProvider
return serviceDef, nil
}
func validateServiceDefinition(s *Definition) error {
// Validate dashboard data
seenDashboardIds := map[string]interface{}{}
for _, dd := range s.Assets.Dashboards {
if _, seen := seenDashboardIds[dd.Id]; seen {
return fmt.Errorf("multiple dashboards found with id %s", dd.Id)
}
seenDashboardIds[dd.Id] = nil
}
if s.Strategy == nil {
return fmt.Errorf("telemetry_collection_strategy is required")
}
// potentially more to follow
return nil
}
func ParseStructWithJsonTagsFromMap[StructType any](data map[string]any) (
*StructType, error,
) {
func parseStructWithJsonTagsFromMap(data map[string]any, target interface{}) error {
mapJson, err := json.Marshal(data)
if err != nil {
return nil, fmt.Errorf("couldn't marshal map to json: %w", err)
return errors.WrapInternalf(err, errors.CodeInternal, "couldn't marshal service definition json data")
}
var res StructType
decoder := json.NewDecoder(bytes.NewReader(mapJson))
decoder.DisallowUnknownFields()
err = decoder.Decode(&res)
err = decoder.Decode(target)
if err != nil {
return nil, fmt.Errorf("couldn't unmarshal json back to struct: %w", err)
return errors.WrapInternalf(err, errors.CodeInternal, "couldn't unmarshal service definition json data")
}
return &res, nil
return nil
}
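A hedged usage sketch for the refactored loader above (the "lambda" service id is an assumption used only for illustration):
awsDefs, err := NewAWSCloudProviderServices() // loads the embedded aws service definitions
if err != nil {
// embedded definitions failed to load or validate
}
def, err := awsDefs.GetServiceDefinition(ctx, "lambda")
if err != nil {
// errors.TypeNotFound with code service_definition_not_found
}
_ = def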

View File

@@ -1,35 +1,3 @@
package services
import (
"testing"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/stretchr/testify/require"
)
func TestAvailableServices(t *testing.T) {
require := require.New(t)
// should be able to list available services.
_, apiErr := List("bad-cloud-provider")
require.NotNil(apiErr)
require.Equal(model.ErrorNotFound, apiErr.Type())
awsSvcs, apiErr := List("aws")
require.Nil(apiErr)
require.Greater(len(awsSvcs), 0)
// should be able to get details of a service
_, err := GetServiceDefinition(
"aws", "bad-service-id",
)
require.NotNil(err)
require.True(errors.Ast(err, errors.TypeNotFound))
svc, err := GetServiceDefinition(
"aws", awsSvcs[0].Id,
)
require.Nil(err)
require.Equal(*svc, awsSvcs[0])
}
// TODO: add more tests for services package

View File

@@ -1,55 +1,57 @@
package cloudintegrations
package store
import (
"context"
"database/sql"
"fmt"
"log/slog"
"strings"
"time"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type cloudProviderAccountsRepository interface {
listConnected(ctx context.Context, orgId string, provider string) ([]types.CloudIntegration, *model.ApiError)
var (
CodeCloudIntegrationAccountNotFound errors.Code = errors.MustNewCode("cloud_integration_account_not_found")
)
get(ctx context.Context, orgId string, provider string, id string) (*types.CloudIntegration, *model.ApiError)
type CloudProviderAccountsRepository interface {
ListConnected(ctx context.Context, orgId string, provider string) ([]integrationstypes.CloudIntegration, error)
getConnectedCloudAccount(ctx context.Context, orgId string, provider string, accountID string) (*types.CloudIntegration, *model.ApiError)
Get(ctx context.Context, orgId string, provider string, id string) (*integrationstypes.CloudIntegration, error)
GetConnectedCloudAccount(ctx context.Context, orgId, provider string, accountID string) (*integrationstypes.CloudIntegration, error)
// Insert an account or update it by (cloudProvider, id)
// for specified non-empty fields
upsert(
Upsert(
ctx context.Context,
orgId string,
provider string,
id *string,
config *types.AccountConfig,
config []byte,
accountId *string,
agentReport *types.AgentReport,
agentReport *integrationstypes.AgentReport,
removedAt *time.Time,
) (*types.CloudIntegration, *model.ApiError)
) (*integrationstypes.CloudIntegration, error)
}
func newCloudProviderAccountsRepository(store sqlstore.SQLStore) (
*cloudProviderAccountsSQLRepository, error,
) {
return &cloudProviderAccountsSQLRepository{
store: store,
}, nil
func NewCloudProviderAccountsRepository(store sqlstore.SQLStore) CloudProviderAccountsRepository {
return &cloudProviderAccountsSQLRepository{store: store}
}
type cloudProviderAccountsSQLRepository struct {
store sqlstore.SQLStore
}
func (r *cloudProviderAccountsSQLRepository) listConnected(
func (r *cloudProviderAccountsSQLRepository) ListConnected(
ctx context.Context, orgId string, cloudProvider string,
) ([]types.CloudIntegration, *model.ApiError) {
accounts := []types.CloudIntegration{}
) ([]integrationstypes.CloudIntegration, error) {
accounts := []integrationstypes.CloudIntegration{}
err := r.store.BunDB().NewSelect().
Model(&accounts).
@@ -62,18 +64,17 @@ func (r *cloudProviderAccountsSQLRepository) listConnected(
Scan(ctx)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"could not query connected cloud accounts: %w", err,
))
slog.ErrorContext(ctx, "error querying connected cloud accounts", "error", err)
return nil, errors.WrapInternalf(err, errors.CodeInternal, "could not query connected cloud accounts")
}
return accounts, nil
}
func (r *cloudProviderAccountsSQLRepository) get(
func (r *cloudProviderAccountsSQLRepository) Get(
ctx context.Context, orgId string, provider string, id string,
) (*types.CloudIntegration, *model.ApiError) {
var result types.CloudIntegration
) (*integrationstypes.CloudIntegration, error) {
var result integrationstypes.CloudIntegration
err := r.store.BunDB().NewSelect().
Model(&result).
@@ -82,23 +83,25 @@ func (r *cloudProviderAccountsSQLRepository) get(
Where("id = ?", id).
Scan(ctx)
if err == sql.ErrNoRows {
return nil, model.NotFoundError(fmt.Errorf(
"couldn't find account with Id %s", id,
))
} else if err != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't query cloud provider accounts: %w", err,
))
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return nil, errors.WrapNotFoundf(
err,
CodeCloudIntegrationAccountNotFound,
"couldn't find account with Id %s", id,
)
}
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query cloud provider account")
}
return &result, nil
}
func (r *cloudProviderAccountsSQLRepository) getConnectedCloudAccount(
func (r *cloudProviderAccountsSQLRepository) GetConnectedCloudAccount(
ctx context.Context, orgId string, provider string, accountId string,
) (*types.CloudIntegration, *model.ApiError) {
var result types.CloudIntegration
) (*integrationstypes.CloudIntegration, error) {
var result integrationstypes.CloudIntegration
err := r.store.BunDB().NewSelect().
Model(&result).
@@ -109,29 +112,25 @@ func (r *cloudProviderAccountsSQLRepository) getConnectedCloudAccount(
Where("removed_at is NULL").
Scan(ctx)
if err == sql.ErrNoRows {
return nil, model.NotFoundError(fmt.Errorf(
"couldn't find connected cloud account %s", accountId,
))
if errors.Is(err, sql.ErrNoRows) {
return nil, errors.WrapNotFoundf(err, CodeCloudIntegrationAccountNotFound, "couldn't find connected cloud account %s", accountId)
} else if err != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't query cloud provider accounts: %w", err,
))
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query cloud provider account")
}
return &result, nil
}
func (r *cloudProviderAccountsSQLRepository) upsert(
func (r *cloudProviderAccountsSQLRepository) Upsert(
ctx context.Context,
orgId string,
provider string,
id *string,
config *types.AccountConfig,
config []byte,
accountId *string,
agentReport *types.AgentReport,
agentReport *integrationstypes.AgentReport,
removedAt *time.Time,
) (*types.CloudIntegration, *model.ApiError) {
) (*integrationstypes.CloudIntegration, error) {
// Insert
if id == nil {
temp := valuer.GenerateUUID().StringValue()
@@ -181,7 +180,7 @@ func (r *cloudProviderAccountsSQLRepository) upsert(
)
}
integration := types.CloudIntegration{
integration := integrationstypes.CloudIntegration{
OrgID: orgId,
Provider: provider,
Identifiable: types.Identifiable{ID: valuer.MustNewUUID(*id)},
@@ -189,28 +188,25 @@ func (r *cloudProviderAccountsSQLRepository) upsert(
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
Config: config,
Config: string(config),
AccountID: accountId,
LastAgentReport: agentReport,
RemovedAt: removedAt,
}
_, dbErr := r.store.BunDB().NewInsert().
_, err := r.store.BunDB().NewInsert().
Model(&integration).
On(onConflictClause).
Exec(ctx)
if dbErr != nil {
return nil, model.InternalError(fmt.Errorf(
"could not upsert cloud account record: %w", dbErr,
))
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't upsert cloud integration account")
}
upsertedAccount, apiErr := r.get(ctx, orgId, provider, *id)
if apiErr != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't fetch upserted account by id: %w", apiErr.ToError(),
))
upsertedAccount, err := r.Get(ctx, orgId, provider, *id)
if err != nil {
slog.ErrorContext(ctx, "error upserting cloud integration account", "error", err)
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't get upserted cloud integration account")
}
return upsertedAccount, nil

View File

@@ -1,64 +1,63 @@
package cloudintegrations
package store
import (
"context"
"database/sql"
"fmt"
"time"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
var (
CodeServiceConfigNotFound = errors.MustNewCode("service_config_not_found")
)
type ServiceConfigDatabase interface {
get(
Get(
ctx context.Context,
orgID string,
cloudAccountId string,
serviceType string,
) (*types.CloudServiceConfig, *model.ApiError)
) ([]byte, error)
upsert(
Upsert(
ctx context.Context,
orgID string,
cloudProvider string,
cloudAccountId string,
serviceId string,
config types.CloudServiceConfig,
) (*types.CloudServiceConfig, *model.ApiError)
config []byte,
) ([]byte, error)
getAllForAccount(
GetAllForAccount(
ctx context.Context,
orgID string,
cloudAccountId string,
) (
configsBySvcId map[string]*types.CloudServiceConfig,
apiErr *model.ApiError,
map[string][]byte,
error,
)
}
func newServiceConfigRepository(store sqlstore.SQLStore) (
*serviceConfigSQLRepository, error,
) {
return &serviceConfigSQLRepository{
store: store,
}, nil
func NewServiceConfigRepository(store sqlstore.SQLStore) ServiceConfigDatabase {
return &serviceConfigSQLRepository{store: store}
}
type serviceConfigSQLRepository struct {
store sqlstore.SQLStore
}
func (r *serviceConfigSQLRepository) get(
func (r *serviceConfigSQLRepository) Get(
ctx context.Context,
orgID string,
cloudAccountId string,
serviceType string,
) (*types.CloudServiceConfig, *model.ApiError) {
var result types.CloudIntegrationService
) ([]byte, error) {
var result integrationstypes.CloudIntegrationService
err := r.store.BunDB().NewSelect().
Model(&result).
@@ -67,36 +66,30 @@ func (r *serviceConfigSQLRepository) get(
Where("ci.id = ?", cloudAccountId).
Where("cis.type = ?", serviceType).
Scan(ctx)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return nil, errors.WrapNotFoundf(err, CodeServiceConfigNotFound, "couldn't find config for cloud account %s", cloudAccountId)
}
if err == sql.ErrNoRows {
return nil, model.NotFoundError(fmt.Errorf(
"couldn't find config for cloud account %s",
cloudAccountId,
))
} else if err != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't query cloud service config: %w", err,
))
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query cloud service config")
}
return &result.Config, nil
return []byte(result.Config), nil
}
func (r *serviceConfigSQLRepository) upsert(
func (r *serviceConfigSQLRepository) Upsert(
ctx context.Context,
orgID string,
cloudProvider string,
cloudAccountId string,
serviceId string,
config types.CloudServiceConfig,
) (*types.CloudServiceConfig, *model.ApiError) {
config []byte,
) ([]byte, error) {
// get cloud integration id from account id
// if the account is not connected, we don't need to upsert the config
var cloudIntegrationId string
err := r.store.BunDB().NewSelect().
Model((*types.CloudIntegration)(nil)).
Model((*integrationstypes.CloudIntegration)(nil)).
Column("id").
Where("provider = ?", cloudProvider).
Where("account_id = ?", cloudAccountId).
@@ -104,20 +97,24 @@ func (r *serviceConfigSQLRepository) upsert(
Where("removed_at is NULL").
Where("last_agent_report is not NULL").
Scan(ctx, &cloudIntegrationId)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't query cloud integration id: %w", err,
))
if errors.Is(err, sql.ErrNoRows) {
return nil, errors.WrapNotFoundf(
err,
CodeCloudIntegrationAccountNotFound,
"couldn't find active cloud integration account",
)
}
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query cloud integration id")
}
serviceConfig := types.CloudIntegrationService{
serviceConfig := integrationstypes.CloudIntegrationService{
Identifiable: types.Identifiable{ID: valuer.GenerateUUID()},
TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
Config: config,
Config: string(config),
Type: serviceId,
CloudIntegrationID: cloudIntegrationId,
}
@@ -126,21 +123,18 @@ func (r *serviceConfigSQLRepository) upsert(
On("conflict(cloud_integration_id, type) do update set config=excluded.config, updated_at=excluded.updated_at").
Exec(ctx)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"could not upsert cloud service config: %w", err,
))
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't upsert cloud service config")
}
return &serviceConfig.Config, nil
return config, nil
}
func (r *serviceConfigSQLRepository) getAllForAccount(
func (r *serviceConfigSQLRepository) GetAllForAccount(
ctx context.Context,
orgID string,
cloudAccountId string,
) (map[string]*types.CloudServiceConfig, *model.ApiError) {
serviceConfigs := []types.CloudIntegrationService{}
) (map[string][]byte, error) {
var serviceConfigs []integrationstypes.CloudIntegrationService
err := r.store.BunDB().NewSelect().
Model(&serviceConfigs).
@@ -149,15 +143,13 @@ func (r *serviceConfigSQLRepository) getAllForAccount(
Where("ci.org_id = ?", orgID).
Scan(ctx)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"could not query service configs from db: %w", err,
))
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query service configs from db")
}
result := map[string]*types.CloudServiceConfig{}
result := make(map[string][]byte)
for _, r := range serviceConfigs {
result[r.Type] = &r.Config
result[r.Type] = []byte(r.Config)
}
return result, nil

View File

@@ -6,11 +6,7 @@ import (
"database/sql"
"encoding/json"
"fmt"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/modules/thirdpartyapi"
"github.com/SigNoz/signoz/pkg/queryparser"
"log/slog"
"io"
"math"
@@ -25,14 +21,19 @@ import (
"time"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/errors"
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
"github.com/SigNoz/signoz/pkg/modules/thirdpartyapi"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/metricsexplorer"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/prometheus/prometheus/promql"
@@ -44,7 +45,6 @@ import (
"github.com/SigNoz/signoz/pkg/contextlinks"
traceFunnelsModule "github.com/SigNoz/signoz/pkg/modules/tracefunnel"
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/inframetrics"
queues2 "github.com/SigNoz/signoz/pkg/query-service/app/integrations/messagingQueues/queues"
"github.com/SigNoz/signoz/pkg/query-service/app/logs"
@@ -113,7 +113,7 @@ type APIHandler struct {
IntegrationsController *integrations.Controller
CloudIntegrationsController *cloudintegrations.Controller
cloudIntegrationsRegistry map[integrationstypes.CloudProviderType]integrationstypes.CloudProvider
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
@@ -162,9 +162,6 @@ type APIHandlerOpts struct {
// Integrations
IntegrationsController *integrations.Controller
// Cloud Provider Integrations
CloudIntegrationsController *cloudintegrations.Controller
// Log parsing pipelines
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
@@ -180,6 +177,8 @@ type APIHandlerOpts struct {
QueryParserAPI *queryparser.API
Signoz *signoz.SigNoz
Logger *slog.Logger
}
// NewAPIHandler returns an APIHandler
@@ -215,12 +214,18 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
summaryService := metricsexplorer.NewSummaryService(opts.Reader, opts.RuleManager, opts.Signoz.Modules.Dashboard)
//quickFilterModule := quickfilter.NewAPI(opts.QuickFilterModule)
cloudIntegrationsRegistry := cloudintegrations.NewCloudProviderRegistry(
opts.Logger,
opts.Signoz.SQLStore,
opts.Signoz.Querier,
)
aH := &APIHandler{
reader: opts.Reader,
temporalityMap: make(map[string]map[v3.Temporality]bool),
ruleManager: opts.RuleManager,
IntegrationsController: opts.IntegrationsController,
CloudIntegrationsController: opts.CloudIntegrationsController,
cloudIntegrationsRegistry: cloudIntegrationsRegistry,
LogsParsingPipelineController: opts.LogsParsingPipelineController,
querier: querier,
querierV2: querierv2,
@@ -1217,13 +1222,22 @@ func (aH *APIHandler) Get(rw http.ResponseWriter, r *http.Request) {
}
dashboard := new(dashboardtypes.Dashboard)
if aH.CloudIntegrationsController.IsCloudIntegrationDashboardUuid(id) {
cloudIntegrationDashboard, apiErr := aH.CloudIntegrationsController.GetDashboardById(ctx, orgID, id)
if apiErr != nil {
render.Error(rw, errorsV2.Wrapf(apiErr, errorsV2.TypeInternal, errorsV2.CodeInternal, "failed to get dashboard"))
if integrationstypes.IsCloudIntegrationDashboardUuid(id) {
cloudProvider, err := integrationstypes.GetCloudProviderFromDashboardID(id)
if err != nil {
render.Error(rw, err)
return
}
dashboard = cloudIntegrationDashboard
integrationDashboard, err := aH.cloudIntegrationsRegistry[cloudProvider].GetDashboard(ctx, &integrationstypes.GettableDashboard{
ID: id,
OrgID: orgID,
})
if err != nil {
render.Error(rw, err)
return
}
dashboard = integrationDashboard
} else if aH.IntegrationsController.IsInstalledIntegrationDashboardID(id) {
integrationDashboard, apiErr := aH.IntegrationsController.GetInstalledIntegrationDashboardById(ctx, orgID, id)
if apiErr != nil {
@@ -1287,11 +1301,13 @@ func (aH *APIHandler) List(rw http.ResponseWriter, r *http.Request) {
dashboards = append(dashboards, installedIntegrationDashboards...)
}
cloudIntegrationDashboards, apiErr := aH.CloudIntegrationsController.AvailableDashboards(ctx, orgID)
if apiErr != nil {
zap.L().Error("failed to get dashboards for cloud integrations", zap.Error(apiErr))
} else {
dashboards = append(dashboards, cloudIntegrationDashboards...)
for _, provider := range aH.cloudIntegrationsRegistry {
cloudIntegrationDashboards, err := provider.GetAvailableDashboards(ctx, orgID)
if err != nil {
zap.L().Error("failed to get dashboards for cloud integrations", zap.Error(err))
} else {
dashboards = append(dashboards, cloudIntegrationDashboards...)
}
}
gettableDashboards, err := dashboardtypes.NewGettableDashboardsFromDashboards(dashboards)
@@ -3267,15 +3283,15 @@ func (aH *APIHandler) GetIntegrationConnectionStatus(w http.ResponseWriter, r *h
lookbackSeconds = 15 * 60
}
connectionStatus, apiErr := aH.calculateConnectionStatus(
connectionStatus, err := aH.calculateConnectionStatus(
r.Context(), orgID, connectionTests, lookbackSeconds,
)
if apiErr != nil {
RespondError(w, apiErr, "Failed to calculate integration connection status")
if err != nil {
render.Error(w, err)
return
}
aH.Respond(w, connectionStatus)
render.Success(w, http.StatusOK, connectionStatus)
}
func (aH *APIHandler) calculateConnectionStatus(
@@ -3283,10 +3299,11 @@ func (aH *APIHandler) calculateConnectionStatus(
orgID valuer.UUID,
connectionTests *integrations.IntegrationConnectionTests,
lookbackSeconds int64,
) (*integrations.IntegrationConnectionStatus, *model.ApiError) {
) (*integrations.IntegrationConnectionStatus, error) {
// Calculate connection status for signals in parallel
result := &integrations.IntegrationConnectionStatus{}
// TODO: migrate to errors package
errors := []*model.ApiError{}
var resultLock sync.Mutex
@@ -3484,12 +3501,14 @@ func (aH *APIHandler) UninstallIntegration(w http.ResponseWriter, r *http.Reques
aH.Respond(w, map[string]interface{}{})
}
// cloud provider integrations
// RegisterCloudIntegrationsRoutes registers routes for cloud provider integrations
func (aH *APIHandler) RegisterCloudIntegrationsRoutes(router *mux.Router, am *middleware.AuthZ) {
subRouter := router.PathPrefix("/api/v1/cloud-integrations").Subrouter()
subRouter.Use(middleware.NewRecovery(aH.Signoz.Instrumentation.Logger()).Wrap)
subRouter.HandleFunc(
"/{cloudProvider}/accounts/generate-connection-url", am.EditAccess(aH.CloudIntegrationsGenerateConnectionUrl),
"/{cloudProvider}/accounts/generate-connection-url", am.EditAccess(aH.CloudIntegrationsGenerateConnectionArtifact),
).Methods(http.MethodPost)
subRouter.HandleFunc(
@@ -3523,170 +3542,199 @@ func (aH *APIHandler) RegisterCloudIntegrationsRoutes(router *mux.Router, am *mi
subRouter.HandleFunc(
"/{cloudProvider}/services/{serviceId}/config", am.EditAccess(aH.CloudIntegrationsUpdateServiceConfig),
).Methods(http.MethodPost)
}
func (aH *APIHandler) CloudIntegrationsListConnectedAccounts(
w http.ResponseWriter, r *http.Request,
) {
cloudProvider := mux.Vars(r)["cloudProvider"]
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
if errv2 != nil {
render.Error(w, errv2)
return
}
resp, apiErr := aH.CloudIntegrationsController.ListConnectedAccounts(
r.Context(), claims.OrgID, cloudProvider,
)
if apiErr != nil {
RespondError(w, apiErr, nil)
return
}
aH.Respond(w, resp)
}
func (aH *APIHandler) CloudIntegrationsGenerateConnectionUrl(
w http.ResponseWriter, r *http.Request,
) {
cloudProvider := mux.Vars(r)["cloudProvider"]
req := cloudintegrations.GenerateConnectionUrlRequest{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
if errv2 != nil {
render.Error(w, errv2)
return
}
result, apiErr := aH.CloudIntegrationsController.GenerateConnectionUrl(
r.Context(), claims.OrgID, cloudProvider, req,
)
if apiErr != nil {
RespondError(w, apiErr, nil)
return
}
aH.Respond(w, result)
}
func (aH *APIHandler) CloudIntegrationsGetAccountStatus(
w http.ResponseWriter, r *http.Request,
) {
cloudProvider := mux.Vars(r)["cloudProvider"]
accountId := mux.Vars(r)["accountId"]
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
if errv2 != nil {
render.Error(w, errv2)
return
}
resp, apiErr := aH.CloudIntegrationsController.GetAccountStatus(
r.Context(), claims.OrgID, cloudProvider, accountId,
)
if apiErr != nil {
RespondError(w, apiErr, nil)
return
}
aH.Respond(w, resp)
}
func (aH *APIHandler) CloudIntegrationsAgentCheckIn(
w http.ResponseWriter, r *http.Request,
) {
cloudProvider := mux.Vars(r)["cloudProvider"]
req := cloudintegrations.AgentCheckInRequest{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
}
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
if errv2 != nil {
render.Error(w, errv2)
return
}
result, err := aH.CloudIntegrationsController.CheckInAsAgent(
r.Context(), claims.OrgID, cloudProvider, req,
)
func (aH *APIHandler) CloudIntegrationsGenerateConnectionArtifact(w http.ResponseWriter, r *http.Request) {
cloudProviderString := mux.Vars(r)["cloudProvider"]
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
if err != nil {
render.Error(w, err)
return
}
aH.Respond(w, result)
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
return
}
reqBody, err := io.ReadAll(r.Body)
if err != nil {
render.Error(w, errors.WrapInternalf(err, errors.CodeInternal, "failed to read request body"))
return
}
resp, err := aH.cloudIntegrationsRegistry[cloudProvider].GenerateConnectionArtifact(r.Context(), &integrationstypes.PostableConnectionArtifact{
OrgID: claims.OrgID,
Data: reqBody,
})
if err != nil {
aH.Signoz.Instrumentation.Logger().ErrorContext(r.Context(),
"failed to generate connection artifact for cloud integration",
slog.String("cloudProvider", cloudProviderString),
slog.String("orgID", claims.OrgID),
slog.Any("error", err),
)
render.Error(w, err)
return
}
render.Success(w, http.StatusOK, resp)
}
func (aH *APIHandler) CloudIntegrationsUpdateAccountConfig(
w http.ResponseWriter, r *http.Request,
) {
cloudProvider := mux.Vars(r)["cloudProvider"]
func (aH *APIHandler) CloudIntegrationsListConnectedAccounts(w http.ResponseWriter, r *http.Request) {
cloudProviderString := mux.Vars(r)["cloudProvider"]
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
return
}
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
if err != nil {
render.Error(w, err)
return
}
resp, err := aH.cloudIntegrationsRegistry[cloudProvider].ListConnectedAccounts(r.Context(), claims.OrgID)
if err != nil {
render.Error(w, err)
return
}
render.Success(w, http.StatusOK, resp)
}
func (aH *APIHandler) CloudIntegrationsGetAccountStatus(w http.ResponseWriter, r *http.Request) {
cloudProviderString := mux.Vars(r)["cloudProvider"]
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
if err != nil {
render.Error(w, err)
return
}
accountId := mux.Vars(r)["accountId"]
req := cloudintegrations.UpdateAccountConfigRequest{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, model.BadRequest(err), nil)
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
return
}
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
if errv2 != nil {
render.Error(w, errv2)
resp, err := aH.cloudIntegrationsRegistry[cloudProvider].GetAccountStatus(r.Context(), claims.OrgID, accountId)
if err != nil {
render.Error(w, err)
return
}
result, apiErr := aH.CloudIntegrationsController.UpdateAccountConfig(
r.Context(), claims.OrgID, cloudProvider, accountId, req,
)
if apiErr != nil {
RespondError(w, apiErr, nil)
return
}
aH.Respond(w, result)
render.Success(w, http.StatusOK, resp)
}
func (aH *APIHandler) CloudIntegrationsDisconnectAccount(
w http.ResponseWriter, r *http.Request,
) {
cloudProvider := mux.Vars(r)["cloudProvider"]
func (aH *APIHandler) CloudIntegrationsAgentCheckIn(w http.ResponseWriter, r *http.Request) {
cloudProviderString := mux.Vars(r)["cloudProvider"]
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
if err != nil {
render.Error(w, err)
return
}
req := new(integrationstypes.PostableAgentCheckInPayload)
if err = json.NewDecoder(r.Body).Decode(req); err != nil {
render.Error(w, errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid request body"))
return
}
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
return
}
req.OrgID = claims.OrgID
resp, err := aH.cloudIntegrationsRegistry[cloudProvider].AgentCheckIn(r.Context(), req)
if err != nil {
render.Error(w, err)
return
}
render.Success(w, http.StatusOK, resp)
}
func (aH *APIHandler) CloudIntegrationsUpdateAccountConfig(w http.ResponseWriter, r *http.Request) {
cloudProviderString := mux.Vars(r)["cloudProvider"]
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
if err != nil {
render.Error(w, err)
return
}
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
return
}
accountId := mux.Vars(r)["accountId"]
reqBody, err := io.ReadAll(r.Body)
if err != nil {
render.Error(w, errors.WrapInternalf(err, errors.CodeInternal, "failed to read request body"))
return
}
resp, err := aH.cloudIntegrationsRegistry[cloudProvider].UpdateAccountConfig(r.Context(), &integrationstypes.PatchableAccountConfig{
OrgID: claims.OrgID,
AccountId: accountId,
Data: reqBody,
})
if err != nil {
render.Error(w, err)
return
}
render.Success(w, http.StatusOK, resp)
}
func (aH *APIHandler) CloudIntegrationsDisconnectAccount(w http.ResponseWriter, r *http.Request) {
cloudProviderString := mux.Vars(r)["cloudProvider"]
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
if err != nil {
render.Error(w, err)
return
}
accountId := mux.Vars(r)["accountId"]
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
return
}
result, err := aH.cloudIntegrationsRegistry[cloudProvider].DisconnectAccount(r.Context(), claims.OrgID, accountId)
if err != nil {
render.Error(w, err)
return
}
render.Success(w, http.StatusOK, result)
}
func (aH *APIHandler) CloudIntegrationsListServices(w http.ResponseWriter, r *http.Request) {
cloudProviderString := mux.Vars(r)["cloudProvider"]
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
if err != nil {
render.Error(w, err)
return
}
var cloudAccountId *string
@@ -3695,26 +3743,22 @@ func (aH *APIHandler) CloudIntegrationsListServices(
cloudAccountId = &cloudAccountIdQP
}
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
return
}
resp, err := aH.cloudIntegrationsRegistry[cloudProvider].ListServices(r.Context(), claims.OrgID, cloudAccountId)
if err != nil {
render.Error(w, err)
return
}
render.Success(w, http.StatusOK, resp)
}
func (aH *APIHandler) CloudIntegrationsGetServiceDetails(w http.ResponseWriter, r *http.Request) {
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
@@ -3726,7 +3770,14 @@ func (aH *APIHandler) CloudIntegrationsGetServiceDetails(
return
}
cloudProviderString := mux.Vars(r)["cloudProvider"]
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
if err != nil {
render.Error(w, err)
return
}
serviceId := mux.Vars(r)["serviceId"]
var cloudAccountId *string
@@ -3736,270 +3787,59 @@ func (aH *APIHandler) CloudIntegrationsGetServiceDetails(
cloudAccountId = &cloudAccountIdQP
}
resp, err := aH.cloudIntegrationsRegistry[cloudProvider].GetServiceDetails(r.Context(), &integrationstypes.GetServiceDetailsReq{
OrgID: orgID,
ServiceId: serviceId,
CloudAccountID: cloudAccountId,
})
if err != nil {
render.Error(w, err)
return
}
render.Success(w, http.StatusOK, resp)
}
func (aH *APIHandler) calculateCloudIntegrationServiceConnectionStatus(
ctx context.Context,
orgID valuer.UUID,
cloudProvider string,
cloudAccountId string,
svcDetails *cloudintegrations.ServiceDetails,
) (*cloudintegrations.ServiceConnectionStatus, *model.ApiError) {
if cloudProvider != "aws" {
// TODO(Raj): Make connection check generic for all providers in a follow up change
return nil, model.BadRequest(
fmt.Errorf("unsupported cloud provider: %s", cloudProvider),
)
}
telemetryCollectionStrategy := svcDetails.Strategy
if telemetryCollectionStrategy == nil {
return nil, model.InternalError(fmt.Errorf(
"service doesn't have telemetry collection strategy: %s", svcDetails.Id,
))
}
result := &cloudintegrations.ServiceConnectionStatus{}
errors := []*model.ApiError{}
var resultLock sync.Mutex
var wg sync.WaitGroup
// Calculate metrics connection status
if telemetryCollectionStrategy.AWSMetrics != nil {
wg.Add(1)
go func() {
defer wg.Done()
metricsConnStatus, apiErr := aH.calculateAWSIntegrationSvcMetricsConnectionStatus(
ctx, cloudAccountId, telemetryCollectionStrategy.AWSMetrics, svcDetails.DataCollected.Metrics,
)
resultLock.Lock()
defer resultLock.Unlock()
if apiErr != nil {
errors = append(errors, apiErr)
} else {
result.Metrics = metricsConnStatus
}
}()
}
// Calculate logs connection status
if telemetryCollectionStrategy.AWSLogs != nil {
wg.Add(1)
go func() {
defer wg.Done()
logsConnStatus, apiErr := aH.calculateAWSIntegrationSvcLogsConnectionStatus(
ctx, orgID, cloudAccountId, telemetryCollectionStrategy.AWSLogs,
)
resultLock.Lock()
defer resultLock.Unlock()
if apiErr != nil {
errors = append(errors, apiErr)
} else {
result.Logs = logsConnStatus
}
}()
}
wg.Wait()
if len(errors) > 0 {
return nil, errors[0]
}
return result, nil
}
func (aH *APIHandler) calculateAWSIntegrationSvcMetricsConnectionStatus(
ctx context.Context,
cloudAccountId string,
strategy *services.AWSMetricsStrategy,
metricsCollectedBySvc []services.CollectedMetric,
) (*cloudintegrations.SignalConnectionStatus, *model.ApiError) {
if strategy == nil || len(strategy.StreamFilters) < 1 {
return nil, nil
}
expectedLabelValues := map[string]string{
"cloud_provider": "aws",
"cloud_account_id": cloudAccountId,
}
metricsNamespace := strategy.StreamFilters[0].Namespace
metricsNamespaceParts := strings.Split(metricsNamespace, "/")
if len(metricsNamespaceParts) >= 2 {
expectedLabelValues["service_namespace"] = metricsNamespaceParts[0]
expectedLabelValues["service_name"] = metricsNamespaceParts[1]
} else {
// metrics for single word namespaces like "CWAgent" do not
// have the service_namespace label populated
expectedLabelValues["service_name"] = metricsNamespaceParts[0]
}
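// For example: namespace "AWS/EC2" yields service_namespace="AWS" and
// service_name="EC2", while "CWAgent" yields only service_name="CWAgent".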
metricNamesCollectedBySvc := []string{}
for _, cm := range metricsCollectedBySvc {
metricNamesCollectedBySvc = append(metricNamesCollectedBySvc, cm.Name)
}
statusForLastReceivedMetric, apiErr := aH.reader.GetLatestReceivedMetric(
ctx, metricNamesCollectedBySvc, expectedLabelValues,
)
if apiErr != nil {
return nil, apiErr
}
if statusForLastReceivedMetric != nil {
return &cloudintegrations.SignalConnectionStatus{
LastReceivedTsMillis: statusForLastReceivedMetric.LastReceivedTsMillis,
LastReceivedFrom: "signoz-aws-integration",
}, nil
}
return nil, nil
}
func (aH *APIHandler) calculateAWSIntegrationSvcLogsConnectionStatus(
ctx context.Context,
orgID valuer.UUID,
cloudAccountId string,
strategy *services.AWSLogsStrategy,
) (*cloudintegrations.SignalConnectionStatus, *model.ApiError) {
if strategy == nil || len(strategy.Subscriptions) < 1 {
return nil, nil
}
logGroupNamePrefix := strategy.Subscriptions[0].LogGroupNamePrefix
if len(logGroupNamePrefix) < 1 {
return nil, nil
}
logsConnTestFilter := &v3.FilterSet{
Operator: "AND",
Items: []v3.FilterItem{
{
Key: v3.AttributeKey{
Key: "cloud.account.id",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeResource,
},
Operator: "=",
Value: cloudAccountId,
},
{
Key: v3.AttributeKey{
Key: "aws.cloudwatch.log_group_name",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeResource,
},
Operator: "like",
Value: logGroupNamePrefix + "%",
},
},
}
// TODO(Raj): Receive this as a param from UI in the future.
lookbackSeconds := int64(30 * 60)
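// i.e. only logs received within the last 30 minutes count towards the
// connection status computed below.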
qrParams := &v3.QueryRangeParamsV3{
Start: time.Now().UnixMilli() - (lookbackSeconds * 1000),
End: time.Now().UnixMilli(),
CompositeQuery: &v3.CompositeQuery{
PanelType: v3.PanelTypeList,
QueryType: v3.QueryTypeBuilder,
BuilderQueries: map[string]*v3.BuilderQuery{
"A": {
PageSize: 1,
Filters: logsConnTestFilter,
QueryName: "A",
DataSource: v3.DataSourceLogs,
Expression: "A",
AggregateOperator: v3.AggregateOperatorNoOp,
},
},
},
}
queryRes, _, err := aH.querier.QueryRange(
ctx, orgID, qrParams,
)
if err != nil {
return nil, model.InternalError(fmt.Errorf(
"could not query for integration connection status: %w", err,
))
}
if len(queryRes) > 0 && queryRes[0].List != nil && len(queryRes[0].List) > 0 {
lastLog := queryRes[0].List[0]
return &cloudintegrations.SignalConnectionStatus{
LastReceivedTsMillis: lastLog.Timestamp.UnixMilli(),
LastReceivedFrom: "signoz-aws-integration",
}, nil
}
return nil, nil
}
func (aH *APIHandler) CloudIntegrationsUpdateServiceConfig(w http.ResponseWriter, r *http.Request) {
cloudProviderString := mux.Vars(r)["cloudProvider"]
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
if err != nil {
render.Error(w, err)
return
}
serviceId := mux.Vars(r)["serviceId"]
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
return
}
reqBody, err := io.ReadAll(r.Body)
if err != nil {
render.Error(w, errors.WrapInternalf(err,
errors.CodeInternal,
"failed to read update service config request body",
))
return
}
result, err := aH.cloudIntegrationsRegistry[cloudProvider].UpdateServiceConfig(
r.Context(), &integrationstypes.PatchableServiceConfig{
OrgID: claims.OrgID,
ServiceId: serviceId,
Config: reqBody,
},
)
if err != nil {
render.Error(w, err)
return
}
render.Success(w, http.StatusOK, result)
}
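All handlers in this hunk follow one shape: parse the {cloudProvider} path variable into a typed integrationstypes.CloudProvider, read claims from the request context, delegate to the matching provider in aH.cloudIntegrationsRegistry, and render the result. A minimal sketch of the registry wiring these handlers assume — the Provider interface, the constant names, and NewRegistry below are illustrative stand-ins, not the repository's actual declarations:

package cloudintegrations // hypothetical package, for illustration only

import (
	"context"

	"github.com/SigNoz/signoz/pkg/types/integrationstypes"
)

// Provider sketches the per-cloud implementation the HTTP handlers delegate
// to; method names mirror the handler call sites above, with return types
// simplified to any (the real methods return concrete response types).
type Provider interface {
	ListConnectedAccounts(ctx context.Context, orgID string) (any, error)
	DisconnectAccount(ctx context.Context, orgID string, accountId string) (any, error)
	ListServices(ctx context.Context, orgID string, cloudAccountId *string) (any, error)
}

// NewRegistry builds the lookup used as aH.cloudIntegrationsRegistry[cloudProvider].
// Keying by the typed CloudProvider means unknown provider strings are rejected
// earlier, by integrationstypes.NewCloudProvider, before the map is ever indexed.
func NewRegistry(aws, azure Provider) map[integrationstypes.CloudProvider]Provider {
	return map[integrationstypes.CloudProvider]Provider{
		integrationstypes.CloudProviderAWS:   aws,   // assumed constant name
		integrationstypes.CloudProviderAzure: azure, // assumed constant name
	}
}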
// logs

View File

@@ -11,6 +11,7 @@ import (
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -107,7 +108,7 @@ type IntegrationsListItem struct {
type Integration struct {
IntegrationDetails
Installation *types.InstalledIntegration `json:"installation"`
Installation *integrationstypes.InstalledIntegration `json:"installation"`
}
type Manager struct {
@@ -223,7 +224,7 @@ func (m *Manager) InstallIntegration(
ctx context.Context,
orgId string,
integrationId string,
config types.InstalledIntegrationConfig,
config integrationstypes.InstalledIntegrationConfig,
) (*IntegrationsListItem, *model.ApiError) {
integrationDetails, apiErr := m.getIntegrationDetails(ctx, integrationId)
if apiErr != nil {
@@ -429,7 +430,7 @@ func (m *Manager) getInstalledIntegration(
ctx context.Context,
orgId string,
integrationId string,
) (*types.InstalledIntegration, *model.ApiError) {
) (*integrationstypes.InstalledIntegration, *model.ApiError) {
iis, apiErr := m.installedIntegrationsRepo.get(
ctx, orgId, []string{integrationId},
)
@@ -457,7 +458,7 @@ func (m *Manager) getInstalledIntegrations(
return nil, apiErr
}
installedTypes := utils.MapSlice(installations, func(i types.InstalledIntegration) string {
installedTypes := utils.MapSlice(installations, func(i integrationstypes.InstalledIntegration) string {
return i.Type
})
integrationDetails, apiErr := m.availableIntegrationsRepo.get(ctx, installedTypes)

View File

@@ -4,22 +4,22 @@ import (
"context"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
)
type InstalledIntegrationsRepo interface {
list(ctx context.Context, orgId string) ([]types.InstalledIntegration, *model.ApiError)
list(ctx context.Context, orgId string) ([]integrationstypes.InstalledIntegration, *model.ApiError)
get(
ctx context.Context, orgId string, integrationTypes []string,
) (map[string]types.InstalledIntegration, *model.ApiError)
) (map[string]integrationstypes.InstalledIntegration, *model.ApiError)
upsert(
ctx context.Context,
orgId string,
integrationType string,
config types.InstalledIntegrationConfig,
) (*types.InstalledIntegration, *model.ApiError)
config integrationstypes.InstalledIntegrationConfig,
) (*integrationstypes.InstalledIntegration, *model.ApiError)
delete(ctx context.Context, orgId string, integrationType string) *model.ApiError
}

View File

@@ -7,6 +7,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/uptrace/bun"
)
@@ -26,8 +27,8 @@ func NewInstalledIntegrationsSqliteRepo(store sqlstore.SQLStore) (
func (r *InstalledIntegrationsSqliteRepo) list(
ctx context.Context,
orgId string,
) ([]types.InstalledIntegration, *model.ApiError) {
integrations := []types.InstalledIntegration{}
) ([]integrationstypes.InstalledIntegration, *model.ApiError) {
integrations := []integrationstypes.InstalledIntegration{}
err := r.store.BunDB().NewSelect().
Model(&integrations).
@@ -44,8 +45,8 @@ func (r *InstalledIntegrationsSqliteRepo) list(
func (r *InstalledIntegrationsSqliteRepo) get(
ctx context.Context, orgId string, integrationTypes []string,
) (map[string]types.InstalledIntegration, *model.ApiError) {
integrations := []types.InstalledIntegration{}
) (map[string]integrationstypes.InstalledIntegration, *model.ApiError) {
integrations := []integrationstypes.InstalledIntegration{}
typeValues := []interface{}{}
for _, integrationType := range integrationTypes {
@@ -62,7 +63,7 @@ func (r *InstalledIntegrationsSqliteRepo) get(
))
}
result := map[string]types.InstalledIntegration{}
result := map[string]integrationstypes.InstalledIntegration{}
for _, ii := range integrations {
result[ii.Type] = ii
}
@@ -74,10 +75,10 @@ func (r *InstalledIntegrationsSqliteRepo) upsert(
ctx context.Context,
orgId string,
integrationType string,
config types.InstalledIntegrationConfig,
) (*types.InstalledIntegration, *model.ApiError) {
config integrationstypes.InstalledIntegrationConfig,
) (*integrationstypes.InstalledIntegration, *model.ApiError) {
integration := types.InstalledIntegration{
integration := integrationstypes.InstalledIntegration{
Identifiable: types.Identifiable{
ID: valuer.GenerateUUID(),
},
@@ -114,7 +115,7 @@ func (r *InstalledIntegrationsSqliteRepo) delete(
ctx context.Context, orgId string, integrationType string,
) *model.ApiError {
_, dbErr := r.store.BunDB().NewDelete().
Model(&types.InstalledIntegration{}).
Model(&integrationstypes.InstalledIntegration{}).
Where("type = ?", integrationType).
Where("org_id = ?", orgId).
Exec(ctx)

View File

@@ -26,7 +26,6 @@ import (
querierAPI "github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
"github.com/SigNoz/signoz/pkg/query-service/app/opamp"
@@ -71,11 +70,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
return nil, err
}
cloudIntegrationsController, err := cloudintegrations.NewController(signoz.SQLStore)
if err != nil {
return nil, err
}
cacheForTraceDetail, err := memorycache.New(context.TODO(), signoz.Instrumentation.ToProviderSettings(), cache.Config{
Provider: "memory",
Memory: cache.Memory{
@@ -127,7 +121,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
Reader: reader,
RuleManager: rm,
IntegrationsController: integrationsController,
CloudIntegrationsController: cloudIntegrationsController,
LogsParsingPipelineController: logParsingPipelineController,
FluxInterval: config.Querier.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
@@ -135,6 +128,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
Logger: signoz.Instrumentation.Logger(),
})
if err != nil {
return nil, err

View File

@@ -43,11 +43,11 @@ var (
// FromUnit returns a converter for the given unit
func FromUnit(u Unit) Converter {
switch u {
case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min", "w", "wk":
case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min":
return DurationConverter
case "bytes", "decbytes", "bits", "bit", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy", "EBy", "ZBy", "YBy", "KiBy", "MiBy", "GiBy", "TiBy", "PiBy", "EiBy", "ZiBy", "YiBy", "kbit", "Mbit", "Gbit", "Tbit", "Pbit", "Ebit", "Zbit", "Ybit", "Kibit", "Mibit", "Gibit", "Tibit", "Pibit":
case "bytes", "decbytes", "bits", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy":
return DataConverter
case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "EBy/s", "ZBy/s", "YBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s", "Ebit/s", "Zbit/s", "Ybit/s", "KiBy/s", "MiBy/s", "GiBy/s", "TiBy/s", "PiBy/s", "EiBy/s", "ZiBy/s", "YiBy/s", "Kibit/s", "Mibit/s", "Gibit/s", "Tibit/s", "Pibit/s", "Eibit/s", "Zibit/s", "Yibit/s":
case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s":
return DataRateConverter
case "percent", "percentunit", "%":
return PercentConverter

View File

@@ -58,80 +58,36 @@ func (*dataConverter) Name() string {
return "data"
}
// Notation followed by UCUM:
// https://ucum.org/ucum
// kibi = Ki, mebi = Mi, gibi = Gi, tebi = Ti, pibi = Pi
// kilo = k, mega = M, giga = G, tera = T, peta = P
// exa = E, zetta = Z, yotta = Y
// byte = By, bit = bit
func FromDataUnit(u Unit) float64 {
switch u {
case "bytes", "By": // base 2
return Byte
case "decbytes": // base 10
return Byte
case "bits", "bit": // base 2
case "bits": // base 2
return Bit
case "decbits": // base 10
return Bit
case "kbytes", "KiBy": // base 2
case "kbytes", "kBy": // base 2
return Kibibyte
case "decKbytes", "deckbytes", "kBy": // base 10
case "decKbytes", "deckbytes": // base 10
return Kilobyte
case "mbytes", "MiBy": // base 2
case "mbytes", "MBy": // base 2
return Mebibyte
case "decMbytes", "decmbytes", "MBy": // base 10
case "decMbytes", "decmbytes": // base 10
return Megabyte
case "gbytes", "GiBy": // base 2
case "gbytes", "GBy": // base 2
return Gibibyte
case "decGbytes", "decgbytes", "GBy": // base 10
case "decGbytes", "decgbytes": // base 10
return Gigabyte
case "tbytes", "TiBy": // base 2
case "tbytes", "TBy": // base 2
return Tebibyte
case "decTbytes", "dectbytes", "TBy": // base 10
case "decTbytes", "dectbytes": // base 10
return Terabyte
case "pbytes", "PiBy": // base 2
case "pbytes", "PBy": // base 2
return Pebibyte
case "decPbytes", "decpbytes", "PBy": // base 10
case "decPbytes", "decpbytes": // base 10
return Petabyte
case "EBy": // base 10
return Exabyte
case "ZBy": // base 10
return Zettabyte
case "YBy": // base 10
return Yottabyte
case "Kibit": // base 2
return Kibibit
case "Mibit": // base 2
return Mebibit
case "Gibit": // base 2
return Gibibit
case "Tibit": // base 2
return Tebibit
case "Pibit": // base 2
return Pebibit
case "EiBy": // base 2
return Exbibyte
case "ZiBy": // base 2
return Zebibyte
case "YiBy": // base 2
return Yobibyte
case "kbit": // base 10
return Kilobit
case "Mbit": // base 10
return Megabit
case "Gbit": // base 10
return Gigabit
case "Tbit": // base 10
return Terabit
case "Pbit": // base 10
return Petabit
case "Ebit": // base 10
return Exabit
case "Zbit": // base 10
return Zettabit
case "Ybit": // base 10
return Yottabit
default:
return 1
}
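Each value returned by FromDataUnit is a multiplier relative to a common base unit, so converting between any two data units reduces to one multiply and one divide. A minimal sketch of that composition (convertData is illustrative; callers in this package go through the Converter implementations instead):

// convertData rescales value from one data unit into another using the
// factor table above: scale into base units, then divide out the target.
func convertData(value float64, from, to Unit) float64 {
	return value * FromDataUnit(from) / FromDataUnit(to)
}

For instance, convertData(1024, "bytes", "kbytes") returns 1, consistent with the converter tests later in this diff.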

View File

@@ -54,12 +54,6 @@ func (*dataRateConverter) Name() string {
return "data_rate"
}
// Notation followed by UCUM:
// https://ucum.org/ucum
// kibi = Ki, mebi = Mi, gibi = Gi, tebi = Ti, pibi = Pi
// kilo = k, mega = M, giga = G, tera = T, peta = P
// exa = E, zetta = Z, yotta = Y
// byte = By, bit = bit
func FromDataRateUnit(u Unit) float64 {
// See https://github.com/SigNoz/signoz/blob/5a81f5f90b34845f5b4b3bdd46acf29d04bf3987/frontend/src/container/NewWidget/RightContainer/dataFormatCategories.ts#L62-L85
switch u {
@@ -71,70 +65,46 @@ func FromDataRateUnit(u Unit) float64 {
return BitPerSecond
case "bps", "bit/s": // bits/sec(SI)
return BitPerSecond
case "KiBs", "KiBy/s": // kibibytes/sec
case "KiBs": // kibibytes/sec
return KibibytePerSecond
case "Kibits", "Kibit/s": // kibibits/sec
case "Kibits": // kibibits/sec
return KibibitPerSecond
case "KBs", "kBy/s": // kilobytes/sec
return KilobytePerSecond
case "Kbits", "kbit/s": // kilobits/sec
return KilobitPerSecond
case "MiBs", "MiBy/s": // mebibytes/sec
case "MiBs": // mebibytes/sec
return MebibytePerSecond
case "Mibits", "Mibit/s": // mebibits/sec
case "Mibits": // mebibits/sec
return MebibitPerSecond
case "MBs", "MBy/s": // megabytes/sec
return MegabytePerSecond
case "Mbits", "Mbit/s": // megabits/sec
return MegabitPerSecond
case "GiBs", "GiBy/s": // gibibytes/sec
case "GiBs": // gibibytes/sec
return GibibytePerSecond
case "Gibits", "Gibit/s": // gibibits/sec
case "Gibits": // gibibits/sec
return GibibitPerSecond
case "GBs", "GBy/s": // gigabytes/sec
return GigabytePerSecond
case "Gbits", "Gbit/s": // gigabits/sec
return GigabitPerSecond
case "TiBs", "TiBy/s": // tebibytes/sec
case "TiBs": // tebibytes/sec
return TebibytePerSecond
case "Tibits", "Tibit/s": // tebibits/sec
case "Tibits": // tebibits/sec
return TebibitPerSecond
case "TBs", "TBy/s": // terabytes/sec
return TerabytePerSecond
case "Tbits", "Tbit/s": // terabits/sec
return TerabitPerSecond
case "PiBs", "PiBy/s": // pebibytes/sec
case "PiBs": // pebibytes/sec
return PebibytePerSecond
case "Pibits", "Pibit/s": // pebibits/sec
case "Pibits": // pebibits/sec
return PebibitPerSecond
case "PBs", "PBy/s": // petabytes/sec
return PetabytePerSecond
case "Pbits", "Pbit/s": // petabits/sec
return PetabitPerSecond
case "EBy/s": // exabytes/sec
return ExabytePerSecond
case "Ebit/s": // exabits/sec
return ExabitPerSecond
case "EiBy/s": // exbibytes/sec
return ExbibytePerSecond
case "Eibit/s": // exbibits/sec
return ExbibitPerSecond
case "ZBy/s": // zettabytes/sec
return ZettabytePerSecond
case "Zbit/s": // zettabits/sec
return ZettabitPerSecond
case "ZiBy/s": // zebibytes/sec
return ZebibytePerSecond
case "Zibit/s": // zebibits/sec
return ZebibitPerSecond
case "YBy/s": // yottabytes/sec
return YottabytePerSecond
case "Ybit/s": // yottabits/sec
return YottabitPerSecond
case "YiBy/s": // yobibytes/sec
return YobibytePerSecond
case "Yibit/s": // yobibits/sec
return YobibitPerSecond
default:
return 1
}

View File

@@ -75,83 +75,3 @@ func TestDataRate(t *testing.T) {
// 1024 * 1024 * 1024 bytes = 1 gbytes
assert.Equal(t, Value{F: 1, U: "GiBs"}, dataRateConverter.Convert(Value{F: 1024 * 1024 * 1024, U: "binBps"}, "GiBs"))
}
func TestDataRateConversionUCUMUnit(t *testing.T) {
dataRateConverter := NewDataRateConverter()
tests := []struct {
name string
input Value
toUnit Unit
expected Value
}{
// Binary byte scaling
{name: "Binary byte scaling: 1024 By/s = 1 KiBy/s", input: Value{F: 1024, U: "By/s"}, toUnit: "KiBy/s", expected: Value{F: 1, U: "KiBy/s"}},
{name: "Kibibyte to bytes: 1 KiBy/s = 1024 By/s", input: Value{F: 1, U: "KiBy/s"}, toUnit: "By/s", expected: Value{F: 1024, U: "By/s"}},
{name: "Binary byte scaling: 1024 KiBy/s = 1 MiBy/s", input: Value{F: 1024, U: "KiBy/s"}, toUnit: "MiBy/s", expected: Value{F: 1, U: "MiBy/s"}},
{name: "Gibibyte to bytes: 1 GiBy/s = 1073741824 By/s", input: Value{F: 1, U: "GiBy/s"}, toUnit: "By/s", expected: Value{F: 1024 * 1024 * 1024, U: "By/s"}},
{name: "Binary byte scaling: 1024 MiBy/s = 1 GiBy/s", input: Value{F: 1024, U: "MiBy/s"}, toUnit: "GiBy/s", expected: Value{F: 1, U: "GiBy/s"}},
{name: "Gibibyte to mebibyte: 1 GiBy/s = 1024 MiBy/s", input: Value{F: 1, U: "GiBy/s"}, toUnit: "MiBy/s", expected: Value{F: 1024, U: "MiBy/s"}},
{name: "Binary byte scaling: 1024 GiBy/s = 1 TiBy/s", input: Value{F: 1024, U: "GiBy/s"}, toUnit: "TiBy/s", expected: Value{F: 1, U: "TiBy/s"}},
{name: "Tebibyte to bytes: 1 TiBy/s = 1099511627776 By/s", input: Value{F: 1, U: "TiBy/s"}, toUnit: "By/s", expected: Value{F: 1024 * 1024 * 1024 * 1024, U: "By/s"}},
{name: "Binary byte scaling: 1024 TiBy/s = 1 PiBy/s", input: Value{F: 1024, U: "TiBy/s"}, toUnit: "PiBy/s", expected: Value{F: 1, U: "PiBy/s"}},
{name: "Pebibyte to tebibyte: 1 PiBy/s = 1024 TiBy/s", input: Value{F: 1, U: "PiBy/s"}, toUnit: "TiBy/s", expected: Value{F: 1024, U: "TiBy/s"}},
// Binary bit scaling
{name: "Binary bit scaling: 1024 bit/s = 1 Kibit/s", input: Value{F: 1024, U: "bit/s"}, toUnit: "Kibit/s", expected: Value{F: 1, U: "Kibit/s"}},
{name: "Kibibit to bits: 1 Kibit/s = 1024 bit/s", input: Value{F: 1, U: "Kibit/s"}, toUnit: "bit/s", expected: Value{F: 1024, U: "bit/s"}},
{name: "Binary bit scaling: 1024 Kibit/s = 1 Mibit/s", input: Value{F: 1024, U: "Kibit/s"}, toUnit: "Mibit/s", expected: Value{F: 1, U: "Mibit/s"}},
{name: "Gibibit to bits: 1 Gibit/s = 1073741824 bit/s", input: Value{F: 1, U: "Gibit/s"}, toUnit: "bit/s", expected: Value{F: 1024 * 1024 * 1024, U: "bit/s"}},
{name: "Binary bit scaling: 1024 Mibit/s = 1 Gibit/s", input: Value{F: 1024, U: "Mibit/s"}, toUnit: "Gibit/s", expected: Value{F: 1, U: "Gibit/s"}},
{name: "Gibibit to mebibit: 1 Gibit/s = 1024 Mibit/s", input: Value{F: 1, U: "Gibit/s"}, toUnit: "Mibit/s", expected: Value{F: 1024, U: "Mibit/s"}},
{name: "Binary bit scaling: 1024 Gibit/s = 1 Tibit/s", input: Value{F: 1024, U: "Gibit/s"}, toUnit: "Tibit/s", expected: Value{F: 1, U: "Tibit/s"}},
{name: "Tebibit to gibibit: 1 Tibit/s = 1024 Gibit/s", input: Value{F: 1, U: "Tibit/s"}, toUnit: "Gibit/s", expected: Value{F: 1024, U: "Gibit/s"}},
{name: "Binary bit scaling: 1024 Tibit/s = 1 Pibit/s", input: Value{F: 1024, U: "Tibit/s"}, toUnit: "Pibit/s", expected: Value{F: 1, U: "Pibit/s"}},
{name: "Pebibit to tebibit: 1 Pibit/s = 1024 Tibit/s", input: Value{F: 1, U: "Pibit/s"}, toUnit: "Tibit/s", expected: Value{F: 1024, U: "Tibit/s"}},
// Bytes to bits
{name: "Bytes to bits: 1 KiBy/s = 8 Kibit/s", input: Value{F: 1, U: "KiBy/s"}, toUnit: "Kibit/s", expected: Value{F: 8, U: "Kibit/s"}},
{name: "Bytes to bits: 1 MiBy/s = 8 Mibit/s", input: Value{F: 1, U: "MiBy/s"}, toUnit: "Mibit/s", expected: Value{F: 8, U: "Mibit/s"}},
{name: "Bytes to bits: 1 GiBy/s = 8 Gibit/s", input: Value{F: 1, U: "GiBy/s"}, toUnit: "Gibit/s", expected: Value{F: 8, U: "Gibit/s"}},
// Unit alias
{name: "Unit alias: 1 KiBs = 1 KiBy/s", input: Value{F: 1, U: "KiBs"}, toUnit: "KiBy/s", expected: Value{F: 1, U: "KiBy/s"}},
{name: "Unit alias: 1 Kibits = 1 Kibit/s", input: Value{F: 1, U: "Kibits"}, toUnit: "Kibit/s", expected: Value{F: 1, U: "Kibit/s"}},
// SI byte scaling (Exa, Zetta, Yotta)
{name: "SI byte scaling: 1000 PBy/s = 1 EBy/s", input: Value{F: 1000, U: "PBy/s"}, toUnit: "EBy/s", expected: Value{F: 1, U: "EBy/s"}},
{name: "Exabyte to bytes: 1 EBy/s = 1e18 By/s", input: Value{F: 1, U: "EBy/s"}, toUnit: "By/s", expected: Value{F: 1e18, U: "By/s"}},
{name: "SI byte scaling: 1000 EBy/s = 1 ZBy/s", input: Value{F: 1000, U: "EBy/s"}, toUnit: "ZBy/s", expected: Value{F: 1, U: "ZBy/s"}},
{name: "Zettabyte to petabytes: 1 ZBy/s = 1000000 PBy/s", input: Value{F: 1, U: "ZBy/s"}, toUnit: "PBy/s", expected: Value{F: 1e6, U: "PBy/s"}},
{name: "SI byte scaling: 1000 ZBy/s = 1 YBy/s", input: Value{F: 1000, U: "ZBy/s"}, toUnit: "YBy/s", expected: Value{F: 1, U: "YBy/s"}},
{name: "Yottabyte to zettabyte: 1 YBy/s = 1000 ZBy/s", input: Value{F: 1, U: "YBy/s"}, toUnit: "ZBy/s", expected: Value{F: 1000, U: "ZBy/s"}},
// Binary byte scaling (Exbi, Zebi, Yobi)
{name: "Binary byte scaling: 1024 PiBy/s = 1 EiBy/s", input: Value{F: 1024, U: "PiBy/s"}, toUnit: "EiBy/s", expected: Value{F: 1, U: "EiBy/s"}},
{name: "Exbibyte to tebibytes: 1 EiBy/s = 1048576 TiBy/s", input: Value{F: 1, U: "EiBy/s"}, toUnit: "TiBy/s", expected: Value{F: 1024 * 1024, U: "TiBy/s"}},
{name: "Binary byte scaling: 1024 EiBy/s = 1 ZiBy/s", input: Value{F: 1024, U: "EiBy/s"}, toUnit: "ZiBy/s", expected: Value{F: 1, U: "ZiBy/s"}},
{name: "Zebibyte to exbibyte: 1 ZiBy/s = 1024 EiBy/s", input: Value{F: 1, U: "ZiBy/s"}, toUnit: "EiBy/s", expected: Value{F: 1024, U: "EiBy/s"}},
{name: "Binary byte scaling: 1024 ZiBy/s = 1 YiBy/s", input: Value{F: 1024, U: "ZiBy/s"}, toUnit: "YiBy/s", expected: Value{F: 1, U: "YiBy/s"}},
{name: "Yobibyte to zebibyte: 1 YiBy/s = 1024 ZiBy/s", input: Value{F: 1, U: "YiBy/s"}, toUnit: "ZiBy/s", expected: Value{F: 1024, U: "ZiBy/s"}},
// SI bit scaling (Exa, Zetta, Yotta)
{name: "SI bit scaling: 1000 Pbit/s = 1 Ebit/s", input: Value{F: 1000, U: "Pbit/s"}, toUnit: "Ebit/s", expected: Value{F: 1, U: "Ebit/s"}},
{name: "Exabit to gigabits: 1 Ebit/s = 1e9 Gbit/s", input: Value{F: 1, U: "Ebit/s"}, toUnit: "Gbit/s", expected: Value{F: 1e9, U: "Gbit/s"}},
{name: "SI bit scaling: 1000 Ebit/s = 1 Zbit/s", input: Value{F: 1000, U: "Ebit/s"}, toUnit: "Zbit/s", expected: Value{F: 1, U: "Zbit/s"}},
{name: "Zettabit to exabit: 1 Zbit/s = 1000 Ebit/s", input: Value{F: 1, U: "Zbit/s"}, toUnit: "Ebit/s", expected: Value{F: 1000, U: "Ebit/s"}},
{name: "SI bit scaling: 1000 Zbit/s = 1 Ybit/s", input: Value{F: 1000, U: "Zbit/s"}, toUnit: "Ybit/s", expected: Value{F: 1, U: "Ybit/s"}},
{name: "Yottabit to zettabit: 1 Ybit/s = 1000 Zbit/s", input: Value{F: 1, U: "Ybit/s"}, toUnit: "Zbit/s", expected: Value{F: 1000, U: "Zbit/s"}},
// Binary bit scaling (Exbi, Zebi, Yobi)
{name: "Binary bit scaling: 1024 Pibit/s = 1 Eibit/s", input: Value{F: 1024, U: "Pibit/s"}, toUnit: "Eibit/s", expected: Value{F: 1, U: "Eibit/s"}},
{name: "Exbibit to pebibit: 1 Eibit/s = 1024 Pibit/s", input: Value{F: 1, U: "Eibit/s"}, toUnit: "Pibit/s", expected: Value{F: 1024, U: "Pibit/s"}},
{name: "Binary bit scaling: 1024 Eibit/s = 1 Zibit/s", input: Value{F: 1024, U: "Eibit/s"}, toUnit: "Zibit/s", expected: Value{F: 1, U: "Zibit/s"}},
{name: "Zebibit to exbibit: 1 Zibit/s = 1024 Eibit/s", input: Value{F: 1, U: "Zibit/s"}, toUnit: "Eibit/s", expected: Value{F: 1024, U: "Eibit/s"}},
{name: "Binary bit scaling: 1024 Zibit/s = 1 Yibit/s", input: Value{F: 1024, U: "Zibit/s"}, toUnit: "Yibit/s", expected: Value{F: 1, U: "Yibit/s"}},
{name: "Yobibit to zebibit: 1 Yibit/s = 1024 Zibit/s", input: Value{F: 1, U: "Yibit/s"}, toUnit: "Zibit/s", expected: Value{F: 1024, U: "Zibit/s"}},
// Bytes to bits (Exbi, Zebi, Yobi)
{name: "Bytes to bits: 1 EiBy/s = 8 Eibit/s", input: Value{F: 1, U: "EiBy/s"}, toUnit: "Eibit/s", expected: Value{F: 8, U: "Eibit/s"}},
{name: "Bytes to bits: 1 ZiBy/s = 8 Zibit/s", input: Value{F: 1, U: "ZiBy/s"}, toUnit: "Zibit/s", expected: Value{F: 8, U: "Zibit/s"}},
{name: "Bytes to bits: 1 YiBy/s = 8 Yibit/s", input: Value{F: 1, U: "YiBy/s"}, toUnit: "Yibit/s", expected: Value{F: 8, U: "Yibit/s"}},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := dataRateConverter.Convert(tt.input, tt.toUnit)
assert.Equal(t, tt.expected, got)
})
}
}

View File

@@ -13,7 +13,7 @@ func TestData(t *testing.T) {
assert.Equal(t, Value{F: 1, U: "By"}, dataConverter.Convert(Value{F: 8, U: "bits"}, "By"))
// 1024 bytes = 1 kbytes
assert.Equal(t, Value{F: 1, U: "kbytes"}, dataConverter.Convert(Value{F: 1024, U: "bytes"}, "kbytes"))
assert.Equal(t, Value{F: 1, U: "kBy"}, dataConverter.Convert(Value{F: 1000, U: "bytes"}, "kBy"))
assert.Equal(t, Value{F: 1, U: "kBy"}, dataConverter.Convert(Value{F: 1024, U: "bytes"}, "kBy"))
// 1 byte = 8 bits
assert.Equal(t, Value{F: 8, U: "bits"}, dataConverter.Convert(Value{F: 1, U: "bytes"}, "bits"))
// 1 mbytes = 1024 kbytes
@@ -22,7 +22,7 @@ func TestData(t *testing.T) {
assert.Equal(t, Value{F: 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "kbytes"}, "bytes"))
// 1024 kbytes = 1 mbytes
assert.Equal(t, Value{F: 1, U: "mbytes"}, dataConverter.Convert(Value{F: 1024, U: "kbytes"}, "mbytes"))
assert.Equal(t, Value{F: 1, U: "MBy"}, dataConverter.Convert(Value{F: 1000, U: "kBy"}, "MBy"))
assert.Equal(t, Value{F: 1, U: "MBy"}, dataConverter.Convert(Value{F: 1024, U: "kbytes"}, "MBy"))
// 1 mbytes = 1024 * 1024 bytes
assert.Equal(t, Value{F: 1024 * 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "mbytes"}, "bytes"))
// 1024 mbytes = 1 gbytes
@@ -45,90 +45,10 @@ func TestData(t *testing.T) {
assert.Equal(t, Value{F: 1024 * 1024 * 1024 * 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "tbytes"}, "bytes"))
// 1024 tbytes = 1 pbytes
assert.Equal(t, Value{F: 1, U: "pbytes"}, dataConverter.Convert(Value{F: 1024, U: "tbytes"}, "pbytes"))
// 1024 tbytes = 1 PiBy
assert.Equal(t, Value{F: 1, U: "PiBy"}, dataConverter.Convert(Value{F: 1024, U: "tbytes"}, "PiBy"))
// 1024 tbytes = 1 pbytes
assert.Equal(t, Value{F: 1, U: "PBy"}, dataConverter.Convert(Value{F: 1024, U: "tbytes"}, "PBy"))
// 1 pbytes = 1024 tbytes
assert.Equal(t, Value{F: 1024, U: "tbytes"}, dataConverter.Convert(Value{F: 1, U: "pbytes"}, "tbytes"))
// 1024 TiBy = 1 pbytes
assert.Equal(t, Value{F: 1024, U: "TiBy"}, dataConverter.Convert(Value{F: 1, U: "pbytes"}, "TiBy"))
}
func TestDataConversionUCUMUnit(t *testing.T) {
dataConverter := NewDataConverter()
tests := []struct {
name string
input Value
toUnit Unit
expected Value
}{
// Bits to bytes
{name: "Bits to bytes: 8 bit = 1 By", input: Value{F: 8, U: "bit"}, toUnit: "By", expected: Value{F: 1, U: "By"}},
{name: "Byte to bits: 1 By = 8 bit", input: Value{F: 1, U: "By"}, toUnit: "bit", expected: Value{F: 8, U: "bit"}},
// Binary byte scaling
{name: "Binary byte scaling: 1024 By = 1 KiBy", input: Value{F: 1024, U: "By"}, toUnit: "KiBy", expected: Value{F: 1, U: "KiBy"}},
{name: "Kibibyte to bytes: 1 KiBy = 1024 By", input: Value{F: 1, U: "KiBy"}, toUnit: "By", expected: Value{F: 1024, U: "By"}},
{name: "Binary byte scaling: 1024 KiBy = 1 MiBy", input: Value{F: 1024, U: "KiBy"}, toUnit: "MiBy", expected: Value{F: 1, U: "MiBy"}},
{name: "Binary byte scaling: 1024 MiBy = 1 GiBy", input: Value{F: 1024, U: "MiBy"}, toUnit: "GiBy", expected: Value{F: 1, U: "GiBy"}},
{name: "Gibibyte to mebibyte: 1 GiBy = 1024 MiBy", input: Value{F: 1, U: "GiBy"}, toUnit: "MiBy", expected: Value{F: 1024, U: "MiBy"}},
{name: "Binary byte scaling: 1024 GiBy = 1 TiBy", input: Value{F: 1024, U: "GiBy"}, toUnit: "TiBy", expected: Value{F: 1, U: "TiBy"}},
{name: "Binary byte scaling: 1024 TiBy = 1 PiBy", input: Value{F: 1024, U: "TiBy"}, toUnit: "PiBy", expected: Value{F: 1, U: "PiBy"}},
{name: "Pebibyte to tebibyte: 1 PiBy = 1024 TiBy", input: Value{F: 1, U: "PiBy"}, toUnit: "TiBy", expected: Value{F: 1024, U: "TiBy"}},
{name: "Gibibyte to bytes: 1 GiBy = 1073741824 By", input: Value{F: 1, U: "GiBy"}, toUnit: "By", expected: Value{F: 1024 * 1024 * 1024, U: "By"}},
{name: "Tebibyte to bytes: 1 TiBy = 1099511627776 By", input: Value{F: 1, U: "TiBy"}, toUnit: "By", expected: Value{F: 1024 * 1024 * 1024 * 1024, U: "By"}},
// SI bit scaling
{name: "SI bit scaling: 1000 bit = 1 kbit", input: Value{F: 1000, U: "bit"}, toUnit: "kbit", expected: Value{F: 1, U: "kbit"}},
{name: "Kilobit to bits: 1 kbit = 1000 bit", input: Value{F: 1, U: "kbit"}, toUnit: "bit", expected: Value{F: 1000, U: "bit"}},
{name: "SI bit scaling: 1000 kbit = 1 Mbit", input: Value{F: 1000, U: "kbit"}, toUnit: "Mbit", expected: Value{F: 1, U: "Mbit"}},
{name: "Gigabit to bits: 1 Gbit = 1000000000 bit", input: Value{F: 1, U: "Gbit"}, toUnit: "bit", expected: Value{F: 1000 * 1000 * 1000, U: "bit"}},
{name: "SI bit scaling: 1000 Mbit = 1 Gbit", input: Value{F: 1000, U: "Mbit"}, toUnit: "Gbit", expected: Value{F: 1, U: "Gbit"}},
{name: "Gigabit to megabit: 1 Gbit = 1000 Mbit", input: Value{F: 1, U: "Gbit"}, toUnit: "Mbit", expected: Value{F: 1000, U: "Mbit"}},
{name: "SI bit scaling: 1000 Gbit = 1 Tbit", input: Value{F: 1000, U: "Gbit"}, toUnit: "Tbit", expected: Value{F: 1, U: "Tbit"}},
{name: "Terabit to gigabit: 1 Tbit = 1000 Gbit", input: Value{F: 1, U: "Tbit"}, toUnit: "Gbit", expected: Value{F: 1000, U: "Gbit"}},
{name: "SI bit scaling: 1000 Tbit = 1 Pbit", input: Value{F: 1000, U: "Tbit"}, toUnit: "Pbit", expected: Value{F: 1, U: "Pbit"}},
{name: "Petabit to terabit: 1 Pbit = 1000 Tbit", input: Value{F: 1, U: "Pbit"}, toUnit: "Tbit", expected: Value{F: 1000, U: "Tbit"}},
// Binary bit scaling
{name: "Binary bit scaling: 1024 bit = 1 Kibit", input: Value{F: 1024, U: "bit"}, toUnit: "Kibit", expected: Value{F: 1, U: "Kibit"}},
{name: "Kibibit to bits: 1 Kibit = 1024 bit", input: Value{F: 1, U: "Kibit"}, toUnit: "bit", expected: Value{F: 1024, U: "bit"}},
{name: "Binary bit scaling: 1024 Kibit = 1 Mibit", input: Value{F: 1024, U: "Kibit"}, toUnit: "Mibit", expected: Value{F: 1, U: "Mibit"}},
{name: "Mebibit to kibibit: 1 Mibit = 1024 Kibit", input: Value{F: 1, U: "Mibit"}, toUnit: "Kibit", expected: Value{F: 1024, U: "Kibit"}},
{name: "Binary bit scaling: 1024 Mibit = 1 Gibit", input: Value{F: 1024, U: "Mibit"}, toUnit: "Gibit", expected: Value{F: 1, U: "Gibit"}},
{name: "Gibibit to mebibit: 1 Gibit = 1024 Mibit", input: Value{F: 1, U: "Gibit"}, toUnit: "Mibit", expected: Value{F: 1024, U: "Mibit"}},
{name: "Binary bit scaling: 1024 Gibit = 1 Tibit", input: Value{F: 1024, U: "Gibit"}, toUnit: "Tibit", expected: Value{F: 1, U: "Tibit"}},
{name: "Tebibit to gibibit: 1 Tibit = 1024 Gibit", input: Value{F: 1, U: "Tibit"}, toUnit: "Gibit", expected: Value{F: 1024, U: "Gibit"}},
{name: "Binary bit scaling: 1024 Tibit = 1 Pibit", input: Value{F: 1024, U: "Tibit"}, toUnit: "Pibit", expected: Value{F: 1, U: "Pibit"}},
{name: "Pebibit to tebibit: 1 Pibit = 1024 Tibit", input: Value{F: 1, U: "Pibit"}, toUnit: "Tibit", expected: Value{F: 1024, U: "Tibit"}},
// Bytes to bits
{name: "Bytes to bits: 1 KiBy = 8 Kibit", input: Value{F: 1, U: "KiBy"}, toUnit: "Kibit", expected: Value{F: 8, U: "Kibit"}},
{name: "Bytes to bits: 1 MiBy = 8 Mibit", input: Value{F: 1, U: "MiBy"}, toUnit: "Mibit", expected: Value{F: 8, U: "Mibit"}},
{name: "Bytes to bits: 1 GiBy = 8 Gibit", input: Value{F: 1, U: "GiBy"}, toUnit: "Gibit", expected: Value{F: 8, U: "Gibit"}},
// SI byte scaling (Exa, Zetta, Yotta)
{name: "SI byte scaling: 1000 PBy = 1 EBy", input: Value{F: 1000, U: "PBy"}, toUnit: "EBy", expected: Value{F: 1, U: "EBy"}},
{name: "Exabyte to bytes: 1 EBy = 1e18 By", input: Value{F: 1, U: "EBy"}, toUnit: "By", expected: Value{F: 1e18, U: "By"}},
{name: "SI byte scaling: 1000 EBy = 1 ZBy", input: Value{F: 1000, U: "EBy"}, toUnit: "ZBy", expected: Value{F: 1, U: "ZBy"}},
{name: "Zettabyte to petabytes: 1 ZBy = 1000000 PBy", input: Value{F: 1, U: "ZBy"}, toUnit: "PBy", expected: Value{F: 1e6, U: "PBy"}},
{name: "SI byte scaling: 1000 ZBy = 1 YBy", input: Value{F: 1000, U: "ZBy"}, toUnit: "YBy", expected: Value{F: 1, U: "YBy"}},
{name: "Yottabyte to zettabyte: 1 YBy = 1000 ZBy", input: Value{F: 1, U: "YBy"}, toUnit: "ZBy", expected: Value{F: 1000, U: "ZBy"}},
// Binary byte scaling (Exbi, Zebi, Yobi)
{name: "Binary byte scaling: 1024 PiBy = 1 EiBy", input: Value{F: 1024, U: "PiBy"}, toUnit: "EiBy", expected: Value{F: 1, U: "EiBy"}},
{name: "Exbibyte to tebibytes: 1 EiBy = 1048576 TiBy", input: Value{F: 1, U: "EiBy"}, toUnit: "TiBy", expected: Value{F: 1024 * 1024, U: "TiBy"}},
{name: "Binary byte scaling: 1024 EiBy = 1 ZiBy", input: Value{F: 1024, U: "EiBy"}, toUnit: "ZiBy", expected: Value{F: 1, U: "ZiBy"}},
{name: "Zebibyte to exbibyte: 1 ZiBy = 1024 EiBy", input: Value{F: 1, U: "ZiBy"}, toUnit: "EiBy", expected: Value{F: 1024, U: "EiBy"}},
{name: "Binary byte scaling: 1024 ZiBy = 1 YiBy", input: Value{F: 1024, U: "ZiBy"}, toUnit: "YiBy", expected: Value{F: 1, U: "YiBy"}},
{name: "Yobibyte to zebibyte: 1 YiBy = 1024 ZiBy", input: Value{F: 1, U: "YiBy"}, toUnit: "ZiBy", expected: Value{F: 1024, U: "ZiBy"}},
// SI bit scaling (Exa, Zetta, Yotta)
{name: "SI bit scaling: 1000 Pbit = 1 Ebit", input: Value{F: 1000, U: "Pbit"}, toUnit: "Ebit", expected: Value{F: 1, U: "Ebit"}},
{name: "Exabit to gigabits: 1 Ebit = 1e9 Gbit", input: Value{F: 1, U: "Ebit"}, toUnit: "Gbit", expected: Value{F: 1e9, U: "Gbit"}},
{name: "SI bit scaling: 1000 Ebit = 1 Zbit", input: Value{F: 1000, U: "Ebit"}, toUnit: "Zbit", expected: Value{F: 1, U: "Zbit"}},
{name: "Zettabit to exabit: 1 Zbit = 1000 Ebit", input: Value{F: 1, U: "Zbit"}, toUnit: "Ebit", expected: Value{F: 1000, U: "Ebit"}},
{name: "SI bit scaling: 1000 Zbit = 1 Ybit", input: Value{F: 1000, U: "Zbit"}, toUnit: "Ybit", expected: Value{F: 1, U: "Ybit"}},
{name: "Yottabit to zettabit: 1 Ybit = 1000 Zbit", input: Value{F: 1, U: "Ybit"}, toUnit: "Zbit", expected: Value{F: 1000, U: "Zbit"}},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := dataConverter.Convert(tt.input, tt.toUnit)
assert.Equal(t, tt.expected, got)
})
}
// 1 pbytes = 1024 TBy
assert.Equal(t, Value{F: 1024, U: "TBy"}, dataConverter.Convert(Value{F: 1, U: "pbytes"}, "TBy"))
}

View File

@@ -47,7 +47,7 @@ func FromTimeUnit(u Unit) Duration {
return Hour
case "d":
return Day
case "w", "wk":
case "w":
return Week
default:
return Second

View File

@@ -54,13 +54,4 @@ func TestDurationConvert(t *testing.T) {
assert.Equal(t, Value{F: 1, U: "ms"}, timeConverter.Convert(Value{F: 1000, U: "us"}, "ms"))
// 1000000000 ns = 1 s
assert.Equal(t, Value{F: 1, U: "s"}, timeConverter.Convert(Value{F: 1000000000, U: "ns"}, "s"))
// 7 d = 1 wk
assert.Equal(t, Value{F: 1, U: "wk"}, timeConverter.Convert(Value{F: 7, U: "d"}, "wk"))
// 1 wk = 7 d
assert.Equal(t, Value{F: 7, U: "d"}, timeConverter.Convert(Value{F: 1, U: "wk"}, "d"))
// 1 wk = 168 h
assert.Equal(t, Value{F: 168, U: "h"}, timeConverter.Convert(Value{F: 1, U: "wk"}, "h"))
// 604800 s = 1 wk
assert.Equal(t, Value{F: 1, U: "wk"}, timeConverter.Convert(Value{F: 604800, U: "s"}, "wk"))
}

View File

@@ -24,30 +24,30 @@ func (f *dataFormatter) Format(value float64, unit string) string {
return humanize.IBytes(uint64(value))
case "decbytes":
return humanize.Bytes(uint64(value))
case "kbytes", "KiBy", "Kibit":
case "bits":
return humanize.IBytes(uint64(value * converter.Bit))
case "decbits":
return humanize.Bytes(uint64(value * converter.Bit))
case "kbytes", "kBy":
return humanize.IBytes(uint64(value * converter.Kibibit))
case "decKbytes", "deckbytes", "kBy", "kbit":
return humanize.Bytes(uint64(value * converter.Kilobit))
case "mbytes", "MiBy", "Mibit":
case "decKbytes", "deckbytes":
return humanize.IBytes(uint64(value * converter.Kilobit))
case "mbytes", "MBy":
return humanize.IBytes(uint64(value * converter.Mebibit))
case "decMbytes", "decmbytes", "MBy", "Mbit":
case "decMbytes", "decmbytes":
return humanize.Bytes(uint64(value * converter.Megabit))
case "gbytes", "GiBy", "Gibit":
case "gbytes", "GBy":
return humanize.IBytes(uint64(value * converter.Gibibit))
case "decGbytes", "decgbytes", "GBy", "Gbit":
case "decGbytes", "decgbytes":
return humanize.Bytes(uint64(value * converter.Gigabit))
case "tbytes", "TiBy", "Tibit":
case "tbytes", "TBy":
return humanize.IBytes(uint64(value * converter.Tebibit))
case "decTbytes", "dectbytes", "TBy", "Tbit":
case "decTbytes", "dectbytes":
return humanize.Bytes(uint64(value * converter.Terabit))
case "pbytes", "PiBy", "Pibit":
case "pbytes", "PBy":
return humanize.IBytes(uint64(value * converter.Pebibit))
case "decPbytes", "decpbytes", "PBy", "Pbit":
case "decPbytes", "decpbytes":
return humanize.Bytes(uint64(value * converter.Petabit))
case "EiBy":
return humanize.IBytes(uint64(value * converter.Exbibit))
case "EBy", "Ebit":
return humanize.Bytes(uint64(value * converter.Exabit))
}
// When unit is not matched, return the value as it is.
return fmt.Sprintf("%v", value)

View File

@@ -28,31 +28,46 @@ func (f *dataRateFormatter) Format(value float64, unit string) string {
return humanize.IBytes(uint64(value*converter.BitPerSecond)) + "/s"
case "bps", "bit/s":
return humanize.Bytes(uint64(value*converter.BitPerSecond)) + "/s"
case "KiBs", "KiBy/s", "Kibits", "Kibit/s":
case "KiBs":
return humanize.IBytes(uint64(value*converter.KibibitPerSecond)) + "/s"
case "KBs", "kBy/s", "Kbits", "kbit/s":
case "Kibits":
return humanize.IBytes(uint64(value*converter.KibibytePerSecond)) + "/s"
case "KBs", "kBy/s":
return humanize.IBytes(uint64(value*converter.KilobitPerSecond)) + "/s"
case "MiBs", "MiBy/s", "Mibits", "Mibit/s":
case "Kbits", "kbit/s":
return humanize.IBytes(uint64(value*converter.KilobytePerSecond)) + "/s"
case "MiBs":
return humanize.IBytes(uint64(value*converter.MebibitPerSecond)) + "/s"
case "MBs", "MBy/s", "Mbits", "Mbit/s":
case "Mibits":
return humanize.IBytes(uint64(value*converter.MebibytePerSecond)) + "/s"
case "MBs", "MBy/s":
return humanize.IBytes(uint64(value*converter.MegabitPerSecond)) + "/s"
case "GiBs", "GiBy/s", "Gibits", "Gibit/s":
case "Mbits", "Mbit/s":
return humanize.IBytes(uint64(value*converter.MegabytePerSecond)) + "/s"
case "GiBs":
return humanize.IBytes(uint64(value*converter.GibibitPerSecond)) + "/s"
case "GBs", "GBy/s", "Gbits", "Gbit/s":
case "Gibits":
return humanize.IBytes(uint64(value*converter.GibibytePerSecond)) + "/s"
case "GBs", "GBy/s":
return humanize.IBytes(uint64(value*converter.GigabitPerSecond)) + "/s"
case "TiBs", "TiBy/s", "Tibits", "Tibit/s":
case "Gbits", "Gbit/s":
return humanize.IBytes(uint64(value*converter.GigabytePerSecond)) + "/s"
case "TiBs":
return humanize.IBytes(uint64(value*converter.TebibitPerSecond)) + "/s"
case "TBs", "TBy/s", "Tbits", "Tbit/s":
case "Tibits":
return humanize.IBytes(uint64(value*converter.TebibytePerSecond)) + "/s"
case "TBs", "TBy/s":
return humanize.IBytes(uint64(value*converter.TerabitPerSecond)) + "/s"
case "PiBs", "PiBy/s", "Pibits", "Pibit/s":
case "Tbits", "Tbit/s":
return humanize.IBytes(uint64(value*converter.TerabytePerSecond)) + "/s"
case "PiBs":
return humanize.IBytes(uint64(value*converter.PebibitPerSecond)) + "/s"
case "PBs", "PBy/s", "Pbits", "Pbit/s":
case "Pibits":
return humanize.IBytes(uint64(value*converter.PebibytePerSecond)) + "/s"
case "PBs", "PBy/s":
return humanize.IBytes(uint64(value*converter.PetabitPerSecond)) + "/s"
// Exa units
case "EBy/s", "Ebit/s":
return humanize.Bytes(uint64(value*converter.ExabitPerSecond)) + "/s"
case "EiBy/s", "Eibit/s":
return humanize.IBytes(uint64(value*converter.ExbibitPerSecond)) + "/s"
case "Pbits", "Pbit/s":
return humanize.IBytes(uint64(value*converter.PetabytePerSecond)) + "/s"
}
// When unit is not matched, return the value as it is.
return fmt.Sprintf("%v", value)

View File

@@ -1,149 +0,0 @@
package formatter
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestDataRateFormatterComprehensive(t *testing.T) {
dataRateFormatter := NewDataRateFormatter()
tests := []struct {
name string
value float64
unit string
expected string
}{
// IEC Base bytes/sec - binBps
{name: "binBps as Bps", value: 0, unit: "binBps", expected: "0 B/s"},
{name: "1 binBps as 1 Bps", value: 1, unit: "binBps", expected: "1 B/s"},
{name: "binBps as Kibps", value: 1024, unit: "binBps", expected: "1.0 KiB/s"},
{name: "binBps as Mibps", value: 1024 * 1024, unit: "binBps", expected: "1.0 MiB/s"},
{name: "binBps as Gibps", value: 1024 * 1024 * 1024, unit: "binBps", expected: "1.0 GiB/s"},
// SI Base bytes/sec - Bps, By/s
{name: "Bps as Bps", value: 1, unit: "Bps", expected: "1 B/s"},
{name: "Bps as kbps", value: 1000, unit: "Bps", expected: "1.0 kB/s"},
{name: "Bps as Mbps", value: 1000 * 1000, unit: "Bps", expected: "1.0 MB/s"},
{name: "Byps as kbps", value: 1000, unit: "By/s", expected: "1.0 kB/s"},
// IEC Base bits/sec - binbps
{name: "binbps as Bps", value: 1, unit: "binbps", expected: "1 B/s"},
{name: "binbps as Kibps", value: 1024, unit: "binbps", expected: "1.0 KiB/s"},
{name: "binbps as Mibps", value: 1024 * 1024, unit: "binbps", expected: "1.0 MiB/s"},
// SI Base bits/sec - bps, bit/s
{name: "bps as kbps", value: 1000, unit: "bps", expected: "1.0 kB/s"},
{name: "bitps as kbps", value: 1000, unit: "bit/s", expected: "1.0 kB/s"},
// Kibibytes/sec - KiBs, KiBy/s
{name: "Kibs as Bps", value: 0, unit: "KiBs", expected: "0 B/s"},
{name: "Kibs as Kibps", value: 1, unit: "KiBs", expected: "1.0 KiB/s"},
{name: "Kibs as Mibps", value: 1024, unit: "KiBs", expected: "1.0 MiB/s"},
{name: "Kibs as Gibps", value: 3 * 1024 * 1024, unit: "KiBs", expected: "3.0 GiB/s"},
{name: "KiByps as Kibps", value: 1, unit: "KiBy/s", expected: "1.0 KiB/s"},
{name: "KiByps as Mibps", value: 1024, unit: "KiBy/s", expected: "1.0 MiB/s"},
// Kibibits/sec - Kibits, Kibit/s
{name: "Kibitps as Kibps", value: 1, unit: "Kibits", expected: "1.0 KiB/s"},
{name: "Kibitps as Mibps", value: 42 * 1024, unit: "Kibits", expected: "42 MiB/s"},
{name: "Kibitps as Kibps 10", value: 10, unit: "Kibit/s", expected: "10 KiB/s"},
// Kilobytes/sec (SI) - KBs, kBy/s
{name: "Kbs as Bps", value: 0.5, unit: "KBs", expected: "500 B/s"},
{name: "Kbs as Mibps", value: 1048.6, unit: "KBs", expected: "1.0 MiB/s"},
{name: "kByps as Bps", value: 1, unit: "kBy/s", expected: "1000 B/s"},
// Kilobits/sec (SI) - Kbits, kbit/s
{name: "Kbitps as Bps", value: 1, unit: "Kbits", expected: "1000 B/s"},
{name: "kbitps as Bps", value: 1, unit: "kbit/s", expected: "1000 B/s"},
// Mebibytes/sec - MiBs, MiBy/s
{name: "Mibs as Mibps", value: 1, unit: "MiBs", expected: "1.0 MiB/s"},
{name: "Mibs as Gibps", value: 1024, unit: "MiBs", expected: "1.0 GiB/s"},
{name: "Mibs as Tibps", value: 1024 * 1024, unit: "MiBs", expected: "1.0 TiB/s"},
{name: "MiByps as Mibps", value: 1, unit: "MiBy/s", expected: "1.0 MiB/s"},
// Mebibits/sec - Mibits, Mibit/s
{name: "Mibitps as Mibps", value: 40, unit: "Mibits", expected: "40 MiB/s"},
{name: "Mibitps as Mibps per second variant", value: 10, unit: "Mibit/s", expected: "10 MiB/s"},
// Megabytes/sec (SI) - MBs, MBy/s
{name: "Mbs as Kibps", value: 1, unit: "MBs", expected: "977 KiB/s"},
{name: "MByps as Kibps", value: 1, unit: "MBy/s", expected: "977 KiB/s"},
// Megabits/sec (SI) - Mbits, Mbit/s
{name: "Mbitps as Kibps", value: 1, unit: "Mbits", expected: "977 KiB/s"},
{name: "Mbitps as Kibps per second variant", value: 1, unit: "Mbit/s", expected: "977 KiB/s"},
// Gibibytes/sec - GiBs, GiBy/s
{name: "Gibs as Gibps", value: 1, unit: "GiBs", expected: "1.0 GiB/s"},
{name: "Gibs as Tibps", value: 1024, unit: "GiBs", expected: "1.0 TiB/s"},
{name: "GiByps as Tibps", value: 42 * 1024, unit: "GiBy/s", expected: "42 TiB/s"},
// Gibibits/sec - Gibits, Gibit/s
{name: "Gibitps as Tibps", value: 42 * 1024, unit: "Gibits", expected: "42 TiB/s"},
{name: "Gibitps as Tibps per second variant", value: 42 * 1024, unit: "Gibit/s", expected: "42 TiB/s"},
// Gigabytes/sec (SI) - GBs, GBy/s
{name: "Gbs as Tibps", value: 42 * 1000, unit: "GBs", expected: "38 TiB/s"},
{name: "GByps as Tibps", value: 42 * 1000, unit: "GBy/s", expected: "38 TiB/s"},
// Gigabits/sec (SI) - Gbits, Gbit/s
{name: "Gbitps as Tibps", value: 42 * 1000, unit: "Gbits", expected: "38 TiB/s"},
{name: "Gbitps as Tibps per second variant", value: 42 * 1000, unit: "Gbit/s", expected: "38 TiB/s"},
// Tebibytes/sec - TiBs, TiBy/s
{name: "Tibs as Tibps", value: 1, unit: "TiBs", expected: "1.0 TiB/s"},
{name: "Tibs as Pibps", value: 1024, unit: "TiBs", expected: "1.0 PiB/s"},
{name: "TiByps as Pibps", value: 42 * 1024, unit: "TiBy/s", expected: "42 PiB/s"},
// Tebibits/sec - Tibits, Tibit/s
{name: "Tibitps as Pibps", value: 42 * 1024, unit: "Tibits", expected: "42 PiB/s"},
{name: "Tibitps as Pibps per second variant", value: 42 * 1024, unit: "Tibit/s", expected: "42 PiB/s"},
// Terabytes/sec (SI) - TBs, TBy/s
{name: "Tbs as Pibps", value: 42 * 1000, unit: "TBs", expected: "37 PiB/s"},
{name: "TByps as Pibps", value: 42 * 1000, unit: "TBy/s", expected: "37 PiB/s"},
// Terabits/sec (SI) - Tbits, Tbit/s
{name: "Tbitps as Pibps", value: 42 * 1000, unit: "Tbits", expected: "37 PiB/s"},
{name: "Tbitps as Pibps per second variant", value: 42 * 1000, unit: "Tbit/s", expected: "37 PiB/s"},
// Pebibytes/sec - PiBs, PiBy/s
{name: "Pibs as Eibps", value: 10 * 1024, unit: "PiBs", expected: "10 EiB/s"},
{name: "PiByps as Eibps", value: 10 * 1024, unit: "PiBy/s", expected: "10 EiB/s"},
// Pebibits/sec - Pibits, Pibit/s
{name: "Pibitps as Eibps", value: 10 * 1024, unit: "Pibits", expected: "10 EiB/s"},
{name: "Pibitps as Eibps per second variant", value: 10 * 1024, unit: "Pibit/s", expected: "10 EiB/s"},
// Petabytes/sec (SI) - PBs, PBy/s
{name: "Pbs as Pibps", value: 42, unit: "PBs", expected: "37 PiB/s"},
{name: "PByps as Pibps", value: 42, unit: "PBy/s", expected: "37 PiB/s"},
// Petabits/sec (SI) - Pbits, Pbit/s
{name: "Pbitps as Pibps", value: 42, unit: "Pbits", expected: "37 PiB/s"},
{name: "Pbitps as Pibps per second variant", value: 42, unit: "Pbit/s", expected: "37 PiB/s"},
// Exabytes/sec (SI) - EBy/s
{name: "EByps as Ebps", value: 10, unit: "EBy/s", expected: "10 EB/s"},
// Exabits/sec (SI) - Ebit/s
{name: "Ebitps as Ebps", value: 10, unit: "Ebit/s", expected: "10 EB/s"},
// Exbibytes/sec (IEC) - EiBy/s
{name: "EiByps as Eibps", value: 10, unit: "EiBy/s", expected: "10 EiB/s"},
// Exbibits/sec (IEC) - Eibit/s
{name: "Eibitps as Eibps", value: 10, unit: "Eibit/s", expected: "10 EiB/s"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := dataRateFormatter.Format(tt.value, tt.unit)
assert.Equal(t, tt.expected, got)
})
}
}

View File

@@ -14,161 +14,21 @@ func TestData(t *testing.T) {
assert.Equal(t, "1.0 KiB", dataFormatter.Format(1024, "bytes"))
assert.Equal(t, "1.0 KiB", dataFormatter.Format(1024, "By"))
assert.Equal(t, "2.3 GiB", dataFormatter.Format(2.3*1024, "mbytes"))
assert.Equal(t, "2.3 GiB", dataFormatter.Format(2.3*1024, "MiBy"))
assert.Equal(t, "2.3 GiB", dataFormatter.Format(2.3*1024, "MBy"))
assert.Equal(t, "1.0 MiB", dataFormatter.Format(1024*1024, "bytes"))
assert.Equal(t, "1.0 MiB", dataFormatter.Format(1024*1024, "By"))
assert.Equal(t, "69 TiB", dataFormatter.Format(69*1024*1024, "mbytes"))
assert.Equal(t, "69 TiB", dataFormatter.Format(69*1024*1024, "MiBy"))
assert.Equal(t, "69 TiB", dataFormatter.Format(69*1024*1024, "MBy"))
assert.Equal(t, "102 KiB", dataFormatter.Format(102*1024, "bytes"))
assert.Equal(t, "102 KiB", dataFormatter.Format(102*1024, "By"))
assert.Equal(t, "240 MiB", dataFormatter.Format(240*1024, "kbytes"))
assert.Equal(t, "240 MiB", dataFormatter.Format(240*1024, "KiBy"))
assert.Equal(t, "240 MiB", dataFormatter.Format(240*1024, "kBy"))
assert.Equal(t, "1.0 GiB", dataFormatter.Format(1024*1024, "kbytes"))
assert.Equal(t, "1.0 GiB", dataFormatter.Format(1024*1024, "KiBy"))
assert.Equal(t, "1.0 GiB", dataFormatter.Format(1024*1024, "kBy"))
assert.Equal(t, "23 GiB", dataFormatter.Format(23*1024*1024, "kbytes"))
assert.Equal(t, "23 GiB", dataFormatter.Format(23*1024*1024, "KiBy"))
assert.Equal(t, "23 GiB", dataFormatter.Format(23*1024*1024, "kBy"))
assert.Equal(t, "32 TiB", dataFormatter.Format(32*1024*1024*1024, "kbytes"))
assert.Equal(t, "32 TiB", dataFormatter.Format(32*1024*1024*1024, "KiBy"))
assert.Equal(t, "32 TiB", dataFormatter.Format(32*1024*1024*1024, "kBy"))
assert.Equal(t, "24 MiB", dataFormatter.Format(24, "mbytes"))
assert.Equal(t, "24 MiB", dataFormatter.Format(24, "MiBy"))
}
func TestDataFormatterComprehensive(t *testing.T) {
dataFormatter := NewDataFormatter()
tests := []struct {
name string
value float64
unit string
expected string
}{
// IEC Base bytes - bytes, By
{name: "bytes: 0", value: 0, unit: "bytes", expected: "0 B"},
{name: "bytes: 1", value: 1, unit: "bytes", expected: "1 B"},
{name: "bytes: 512", value: 512, unit: "bytes", expected: "512 B"},
{name: "bytes: 1023", value: 1023, unit: "bytes", expected: "1023 B"},
{name: "bytes: 1024 = 1 KiB", value: 1024, unit: "bytes", expected: "1.0 KiB"},
{name: "bytes: 1536", value: 1536, unit: "bytes", expected: "1.5 KiB"},
{name: "bytes: 1024*1024 = 1 MiB", value: 1024 * 1024, unit: "bytes", expected: "1.0 MiB"},
{name: "bytes: 1024*1024*1024 = 1 GiB", value: 1024 * 1024 * 1024, unit: "bytes", expected: "1.0 GiB"},
{name: "By: same as bytes", value: 1024, unit: "By", expected: "1.0 KiB"},
// SI Base bytes - decbytes
{name: "decbytes: 1", value: 1, unit: "decbytes", expected: "1 B"},
{name: "decbytes: 1000 = 1 kB", value: 1000, unit: "decbytes", expected: "1.0 kB"},
{name: "decbytes: 1000*1000 = 1 MB", value: 1000 * 1000, unit: "decbytes", expected: "1.0 MB"},
{name: "decbytes: 1000*1000*1000 = 1 GB", value: 1000 * 1000 * 1000, unit: "decbytes", expected: "1.0 GB"},
// Kibibytes - kbytes, KiBy (IEC)
{name: "kbytes: 0", value: 0, unit: "kbytes", expected: "0 B"},
{name: "kbytes: 1 = 1 KiB", value: 1, unit: "kbytes", expected: "1.0 KiB"},
{name: "kbytes: 512", value: 512, unit: "kbytes", expected: "512 KiB"},
{name: "kbytes: 1024 = 1 MiB", value: 1024, unit: "kbytes", expected: "1.0 MiB"},
{name: "kbytes: 1024*1024 = 1 GiB", value: 1024 * 1024, unit: "kbytes", expected: "1.0 GiB"},
{name: "kbytes: 2.3*1024 = 2.3 MiB", value: 2.3 * 1024, unit: "kbytes", expected: "2.3 MiB"},
{name: "KiBy: 1 = 1 KiB", value: 1, unit: "KiBy", expected: "1.0 KiB"},
{name: "KiBy: 1024 = 1 MiB", value: 1024, unit: "KiBy", expected: "1.0 MiB"},
{name: "kbytes and KiBy alias", value: 240 * 1024, unit: "KiBy", expected: "240 MiB"},
// SI Kilobytes - decKbytes, deckbytes, kBy
{name: "decKbytes: 1 = 1 kB", value: 1, unit: "decKbytes", expected: "1.0 kB"},
{name: "decKbytes: 1000 = 1 MB", value: 1000, unit: "decKbytes", expected: "1.0 MB"},
{name: "deckbytes: 1 = 1 kB", value: 1, unit: "deckbytes", expected: "1.0 kB"},
{name: "kBy: 1 = 1 kB", value: 1, unit: "kBy", expected: "1.0 kB"},
{name: "kBy: 1000 = 1 MB", value: 1000, unit: "kBy", expected: "1.0 MB"},
// Mebibytes - mbytes, MiBy (IEC)
{name: "mbytes: 1 = 1 MiB", value: 1, unit: "mbytes", expected: "1.0 MiB"},
{name: "mbytes: 24", value: 24, unit: "mbytes", expected: "24 MiB"},
{name: "mbytes: 1024 = 1 GiB", value: 1024, unit: "mbytes", expected: "1.0 GiB"},
{name: "mbytes: 1024*1024 = 1 TiB", value: 1024 * 1024, unit: "mbytes", expected: "1.0 TiB"},
{name: "mbytes: 69*1024 = 69 GiB", value: 69 * 1024, unit: "mbytes", expected: "69 GiB"},
{name: "mbytes: 69*1024*1024 = 69 TiB", value: 69 * 1024 * 1024, unit: "mbytes", expected: "69 TiB"},
{name: "MiBy: 1 = 1 MiB", value: 1, unit: "MiBy", expected: "1.0 MiB"},
{name: "MiBy: 1024 = 1 GiB", value: 1024, unit: "MiBy", expected: "1.0 GiB"},
// SI Megabytes - decMbytes, decmbytes, MBy
{name: "decMbytes: 1 = 1 MB", value: 1, unit: "decMbytes", expected: "1.0 MB"},
{name: "decMbytes: 1000 = 1 GB", value: 1000, unit: "decMbytes", expected: "1.0 GB"},
{name: "decmbytes: 1 = 1 MB", value: 1, unit: "decmbytes", expected: "1.0 MB"},
{name: "MBy: 1 = 1 MB", value: 1, unit: "MBy", expected: "1.0 MB"},
// Gibibytes - gbytes, GiBy (IEC)
{name: "gbytes: 1 = 1 GiB", value: 1, unit: "gbytes", expected: "1.0 GiB"},
{name: "gbytes: 1024 = 1 TiB", value: 1024, unit: "gbytes", expected: "1.0 TiB"},
{name: "GiBy: 42*1024 = 42 TiB", value: 42 * 1024, unit: "GiBy", expected: "42 TiB"},
// SI Gigabytes - decGbytes, decgbytes, GBy
{name: "decGbytes: 42*1000 = 42 TB", value: 42 * 1000, unit: "decGbytes", expected: "42 TB"},
{name: "GBy: 42*1000 = 42 TB", value: 42 * 1000, unit: "GBy", expected: "42 TB"},
// Tebibytes - tbytes, TiBy (IEC)
{name: "tbytes: 1 = 1 TiB", value: 1, unit: "tbytes", expected: "1.0 TiB"},
{name: "tbytes: 1024 = 1 PiB", value: 1024, unit: "tbytes", expected: "1.0 PiB"},
{name: "TiBy: 42*1024 = 42 PiB", value: 42 * 1024, unit: "TiBy", expected: "42 PiB"},
// SI Terabytes - decTbytes, dectbytes, TBy
{name: "decTbytes: 42*1000 = 42 PB", value: 42 * 1000, unit: "decTbytes", expected: "42 PB"},
{name: "dectbytes: 42*1000 = 42 PB", value: 42 * 1000, unit: "dectbytes", expected: "42 PB"},
{name: "TBy: 42*1000 = 42 PB", value: 42 * 1000, unit: "TBy", expected: "42 PB"},
// Pebibytes - pbytes, PiBy (IEC)
{name: "pbytes: 10*1024 = 10 EiB", value: 10 * 1024, unit: "pbytes", expected: "10 EiB"},
{name: "PiBy: 10*1024 = 10 EiB", value: 10 * 1024, unit: "PiBy", expected: "10 EiB"},
// SI Petabytes - decPbytes, decpbytes, PBy
{name: "decPbytes: 42 = 42 PB", value: 42, unit: "decPbytes", expected: "42 PB"},
{name: "decpbytes: 42 = 42 PB", value: 42, unit: "decpbytes", expected: "42 PB"},
{name: "PBy: 42 = 42 PB", value: 42, unit: "PBy", expected: "42 PB"},
// Exbibytes - EiBy (IEC)
{name: "EiBy: 10 = 10 EiB", value: 10, unit: "EiBy", expected: "10 EiB"},
// Exabytes - EBy (SI)
{name: "EBy: 10 = 10 EB", value: 10, unit: "EBy", expected: "10 EB"},
// Kibibits - Kibit (IEC)
{name: "Kibit: 1 = 1 KiB", value: 1, unit: "Kibit", expected: "1.0 KiB"},
{name: "Kibit: 1024 = 1 MiB", value: 1024, unit: "Kibit", expected: "1.0 MiB"},
// Mebibits - Mibit (IEC)
{name: "Mibit: 1 = 1 MiB", value: 1, unit: "Mibit", expected: "1.0 MiB"},
{name: "Mibit: 1024 = 1 GiB", value: 1024, unit: "Mibit", expected: "1.0 GiB"},
// Gibibits - Gibit (IEC)
{name: "Gibit: 42*1024 = 42 TiB", value: 42 * 1024, unit: "Gibit", expected: "42 TiB"},
// Tebibits - Tibit (IEC)
{name: "Tibit: 42*1024 = 42 PiB", value: 42 * 1024, unit: "Tibit", expected: "42 PiB"},
// Pebibits - Pibit (IEC)
{name: "Pibit: 10*1024 = 10 EiB", value: 10 * 1024, unit: "Pibit", expected: "10 EiB"},
// Kilobits - kbit (SI)
{name: "kbit: 1 = 1 kB", value: 1, unit: "kbit", expected: "1.0 kB"},
{name: "kbit: 1000 = 1 MB", value: 1000, unit: "kbit", expected: "1.0 MB"},
// Megabits - Mbit (SI)
{name: "Mbit: 1 = 1 MB", value: 1, unit: "Mbit", expected: "1.0 MB"},
{name: "Mbit: 1000 = 1 GB", value: 1000, unit: "Mbit", expected: "1.0 GB"},
// Gigabits - Gbit (SI)
{name: "Gbit: 42*1000 = 42 TB", value: 42 * 1000, unit: "Gbit", expected: "42 TB"},
// Terabits - Tbit (SI)
{name: "Tbit: 42*1000 = 42 PB", value: 42 * 1000, unit: "Tbit", expected: "42 PB"},
// Petabits - Pbit (SI)
{name: "Pbit: 42 = 42 PB", value: 42, unit: "Pbit", expected: "42 PB"},
// Exabits - Ebit (SI)
{name: "Ebit: 10 = 10 EB", value: 10, unit: "Ebit", expected: "10 EB"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := dataFormatter.Format(tt.value, tt.unit)
assert.Equal(t, tt.expected, got)
})
}
assert.Equal(t, "24 MiB", dataFormatter.Format(24, "MBy"))
}
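The removed table encodes the usual divide-until-below-the-base loop over IEC suffixes, with separate dec* aliases for the SI base of 1000. A minimal sketch of the binary half, assuming a fixed one-decimal rendering (the real formatter also handles SI bases, bit aliases, and integer rendering such as "512 B"):

package main

import "fmt"

// humanizeIEC divides by 1024 until the value drops below the base,
// then attaches the matching binary suffix.
func humanizeIEC(bytes float64) string {
	suffixes := []string{"B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB"}
	i := 0
	for bytes >= 1024 && i < len(suffixes)-1 {
		bytes /= 1024
		i++
	}
	return fmt.Sprintf("%.1f %s", bytes, suffixes[i])
}

func main() {
	fmt.Println(humanizeIEC(1536))     // 1.5 KiB
	fmt.Println(humanizeIEC(24 << 20)) // 24.0 MiB
}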

View File

@@ -18,11 +18,11 @@ var (
func FromUnit(u string) Formatter {
switch u {
case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min", "w", "wk":
case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min":
return DurationFormatter
case "bytes", "decbytes", "bits", "bit", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy", "EBy", "KiBy", "MiBy", "GiBy", "TiBy", "PiBy", "EiBy", "kbit", "Mbit", "Gbit", "Tbit", "Pbit", "Ebit", "Kibit", "Mibit", "Gibit", "Tibit", "Pibit":
case "bytes", "decbytes", "bits", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy":
return DataFormatter
case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "EBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s", "Ebit/s", "KiBy/s", "MiBy/s", "GiBy/s", "TiBy/s", "PiBy/s", "EiBy/s", "Kibit/s", "Mibit/s", "Gibit/s", "Tibit/s", "Pibit/s", "Eibit/s":
case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s":
return DataRateFormatter
case "percent", "percentunit", "%":
return PercentFormatter
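FromUnit resolves a unit string to the formatter that understands it, so callers dispatch once per unit rather than per value. A hedged usage sketch: FromUnit and Format are the names used in this file, while the package name and the exact output string are assumptions.

package formatter

import "fmt"

// Sketch, not part of the diff: pick the data-rate formatter via one of
// the aliases kept in the case list above and format a value with it.
func ExampleFromUnit() {
	f := FromUnit("MBy/s")
	fmt.Println(f.Format(42, "MBy/s"))
}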

View File

@@ -32,7 +32,7 @@ func (f *durationFormatter) Format(value float64, unit string) string {
return toHours(value)
case "d":
return toDays(value)
case "w", "wk":
case "w":
return toWeeks(value)
}
// When unit is not matched, return the value as it is.
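With "wk" gone from the week case, values tagged "wk" now reach the unmatched branch and are returned unconverted; only "w" selects toWeeks. A minimal sketch of that post-change shape (toWeeks comes from this file; the package name and the fallback rendering here are assumptions):

package formatter

import "strconv"

// formatWeeksOnly mirrors the dispatch after the change: "w" converts,
// anything else (including the removed "wk") passes through as-is.
func formatWeeksOnly(value float64, unit string) string {
	if unit == "w" {
		return toWeeks(value)
	}
	// When unit is not matched, return the value as it is.
	return strconv.FormatFloat(value, 'f', -1, 64)
}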

View File

@@ -1,247 +0,0 @@
package types
import (
"database/sql/driver"
"encoding/json"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/uptrace/bun"
)
type IntegrationUserEmail string
const (
AWSIntegrationUserEmail IntegrationUserEmail = "aws-integration@signoz.io"
)
var AllIntegrationUserEmails = []IntegrationUserEmail{
AWSIntegrationUserEmail,
}
// --------------------------------------------------------------------------
// Normal integration uses just the installed_integration table
// --------------------------------------------------------------------------
type InstalledIntegration struct {
bun.BaseModel `bun:"table:installed_integration"`
Identifiable
Type string `json:"type" bun:"type,type:text,unique:org_id_type"`
Config InstalledIntegrationConfig `json:"config" bun:"config,type:text"`
InstalledAt time.Time `json:"installed_at" bun:"installed_at,default:current_timestamp"`
OrgID string `json:"org_id" bun:"org_id,type:text,unique:org_id_type,references:organizations(id),on_delete:cascade"`
}
type InstalledIntegrationConfig map[string]interface{}
// For serializing from db
func (c *InstalledIntegrationConfig) Scan(src interface{}) error {
var data []byte
switch v := src.(type) {
case []byte:
data = v
case string:
data = []byte(v)
default:
return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
}
return json.Unmarshal(data, c)
}
// For serializing to db
func (c *InstalledIntegrationConfig) Value() (driver.Value, error) {
filterSetJson, err := json.Marshal(c)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "could not serialize integration config to JSON")
}
return filterSetJson, nil
}
// --------------------------------------------------------------------------
// Cloud integration uses the cloud_integration table
// and cloud_integrations_service table
// --------------------------------------------------------------------------
type CloudIntegration struct {
bun.BaseModel `bun:"table:cloud_integration"`
Identifiable
TimeAuditable
Provider string `json:"provider" bun:"provider,type:text,unique:provider_id"`
Config *AccountConfig `json:"config" bun:"config,type:text"`
AccountID *string `json:"account_id" bun:"account_id,type:text"`
LastAgentReport *AgentReport `json:"last_agent_report" bun:"last_agent_report,type:text"`
RemovedAt *time.Time `json:"removed_at" bun:"removed_at,type:timestamp,nullzero"`
OrgID string `bun:"org_id,type:text,unique:provider_id"`
}
func (a *CloudIntegration) Status() AccountStatus {
status := AccountStatus{}
if a.LastAgentReport != nil {
lastHeartbeat := a.LastAgentReport.TimestampMillis
status.Integration.LastHeartbeatTsMillis = &lastHeartbeat
}
return status
}
func (a *CloudIntegration) Account() Account {
ca := Account{Id: a.ID.StringValue(), Status: a.Status()}
if a.AccountID != nil {
ca.CloudAccountId = *a.AccountID
}
if a.Config != nil {
ca.Config = *a.Config
} else {
ca.Config = DefaultAccountConfig()
}
return ca
}
type Account struct {
Id string `json:"id"`
CloudAccountId string `json:"cloud_account_id"`
Config AccountConfig `json:"config"`
Status AccountStatus `json:"status"`
}
type AccountStatus struct {
Integration AccountIntegrationStatus `json:"integration"`
}
type AccountIntegrationStatus struct {
LastHeartbeatTsMillis *int64 `json:"last_heartbeat_ts_ms"`
}
func DefaultAccountConfig() AccountConfig {
return AccountConfig{
EnabledRegions: []string{},
}
}
type AccountConfig struct {
EnabledRegions []string `json:"regions"`
}
// For serializing from db
func (c *AccountConfig) Scan(src any) error {
var data []byte
switch v := src.(type) {
case []byte:
data = v
case string:
data = []byte(v)
default:
return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
}
return json.Unmarshal(data, c)
}
// For serializing to db
func (c *AccountConfig) Value() (driver.Value, error) {
if c == nil {
return nil, errors.NewInternalf(errors.CodeInternal, "cloud account config is nil")
}
serialized, err := json.Marshal(c)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't serialize cloud account config to JSON")
}
// Return as string instead of []byte to ensure PostgreSQL stores as text, not bytea
return string(serialized), nil
}
type AgentReport struct {
TimestampMillis int64 `json:"timestamp_millis"`
Data map[string]any `json:"data"`
}
// For serializing from db
func (r *AgentReport) Scan(src any) error {
var data []byte
switch v := src.(type) {
case []byte:
data = v
case string:
data = []byte(v)
default:
return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
}
return json.Unmarshal(data, r)
}
// For serializing to db
func (r *AgentReport) Value() (driver.Value, error) {
if r == nil {
return nil, errors.NewInternalf(errors.CodeInternal, "agent report is nil")
}
serialized, err := json.Marshal(r)
if err != nil {
return nil, errors.WrapInternalf(
err, errors.CodeInternal, "couldn't serialize agent report to JSON",
)
}
// Return as string instead of []byte to ensure PostgreSQL stores as text, not bytea
return string(serialized), nil
}
type CloudIntegrationService struct {
bun.BaseModel `bun:"table:cloud_integration_service,alias:cis"`
Identifiable
TimeAuditable
Type string `bun:"type,type:text,notnull,unique:cloud_integration_id_type"`
Config CloudServiceConfig `bun:"config,type:text"`
CloudIntegrationID string `bun:"cloud_integration_id,type:text,notnull,unique:cloud_integration_id_type,references:cloud_integrations(id),on_delete:cascade"`
}
type CloudServiceLogsConfig struct {
Enabled bool `json:"enabled"`
S3Buckets map[string][]string `json:"s3_buckets,omitempty"`
}
type CloudServiceMetricsConfig struct {
Enabled bool `json:"enabled"`
}
type CloudServiceConfig struct {
Logs *CloudServiceLogsConfig `json:"logs,omitempty"`
Metrics *CloudServiceMetricsConfig `json:"metrics,omitempty"`
}
// For serializing from db
func (c *CloudServiceConfig) Scan(src any) error {
var data []byte
switch src := src.(type) {
case []byte:
data = src
case string:
data = []byte(src)
default:
return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
}
return json.Unmarshal(data, c)
}
// For serializing to db
func (c *CloudServiceConfig) Value() (driver.Value, error) {
if c == nil {
return nil, errors.NewInternalf(errors.CodeInternal, "cloud service config is nil")
}
serialized, err := json.Marshal(c)
if err != nil {
return nil, errors.WrapInternalf(
err, errors.CodeInternal, "couldn't serialize cloud service config to JSON",
)
}
// Return as string instead of []byte to ensure PostgreSQL stores as text, not bytea
return string(serialized), nil
}

View File

@@ -0,0 +1,466 @@
package integrationstypes
import (
"context"
"database/sql/driver"
"encoding/json"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/uptrace/bun"
)
// CloudProvider defines the interface to be implemented by different cloud providers.
// This is a generic interface, so it accepts and returns generic types instead of concrete ones.
// It is the cloud provider's responsibility to cast them to the appropriate types and validate them.
type CloudProvider interface {
GetName() CloudProviderType
AgentCheckIn(ctx context.Context, req *PostableAgentCheckInPayload) (any, error)
GenerateConnectionArtifact(ctx context.Context, req *PostableConnectionArtifact) (any, error)
GetAccountStatus(ctx context.Context, orgID, accountID string) (*GettableAccountStatus, error)
ListServices(ctx context.Context, orgID string, accountID *string) (any, error) // returns a provider-specific type, e.g. GettableAWSServices
GetServiceDetails(ctx context.Context, req *GetServiceDetailsReq) (any, error)
ListConnectedAccounts(ctx context.Context, orgID string) (*GettableConnectedAccountsList, error)
GetDashboard(ctx context.Context, req *GettableDashboard) (*dashboardtypes.Dashboard, error)
GetAvailableDashboards(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error)
UpdateAccountConfig(ctx context.Context, req *PatchableAccountConfig) (any, error) // req.Data carries a provider-specific payload, e.g. a serialized PatchableAWSAccountConfig
UpdateServiceConfig(ctx context.Context, req *PatchableServiceConfig) (any, error)
DisconnectAccount(ctx context.Context, orgID, accountID string) (*CloudIntegration, error)
}
type GettableDashboard struct {
ID string
OrgID valuer.UUID
}
type GettableCloudIntegrationConnectionParams struct {
IngestionUrl string `json:"ingestion_url,omitempty"`
IngestionKey string `json:"ingestion_key,omitempty"`
SigNozAPIUrl string `json:"signoz_api_url,omitempty"`
SigNozAPIKey string `json:"signoz_api_key,omitempty"`
}
type GettableIngestionKey struct {
Name string `json:"name"`
Value string `json:"value"`
// Other attributes from the gateway response are omitted here since they are not used.
}
type GettableIngestionKeysSearch struct {
Status string `json:"status"`
Data []GettableIngestionKey `json:"data"`
Error string `json:"error"`
}
type GettableCreateIngestionKey struct {
Status string `json:"status"`
Data GettableIngestionKey `json:"data"`
Error string `json:"error"`
}
type GettableDeployment struct {
Name string `json:"name"`
ClusterInfo struct {
Region struct {
DNS string `json:"dns"`
} `json:"region"`
} `json:"cluster"`
}
type GettableConnectedAccountsList struct {
Accounts []*Account `json:"accounts"`
}
// SigNozAWSAgentConfig represents requirements for agent deployment in user's AWS account
type SigNozAWSAgentConfig struct {
// The region in which the SigNoz agent should be installed.
Region string `json:"region"`
IngestionUrl string `json:"ingestion_url"`
IngestionKey string `json:"ingestion_key"`
SigNozAPIUrl string `json:"signoz_api_url"`
SigNozAPIKey string `json:"signoz_api_key"`
Version string `json:"version,omitempty"`
}
type PostableConnectionArtifact struct {
OrgID string
Data []byte // provider-specific payload, e.g. a serialized PostableAWSConnectionUrl
}
type PostableAWSConnectionUrl struct {
// Optional. To be specified for updates.
// TODO: evaluate and remove if not needed.
AccountId *string `json:"account_id,omitempty"`
AccountConfig *AWSAccountConfig `json:"account_config"`
AgentConfig *SigNozAWSAgentConfig `json:"agent_config"`
}
func (p *PostableAWSConnectionUrl) Unmarshal(src any) error {
var data []byte
switch src := src.(type) {
case []byte:
data = src
case string:
data = []byte(src)
default:
return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
}
err := json.Unmarshal(data, p)
if err != nil {
return errors.WrapInternalf(
err, errors.CodeInternal, "couldn't deserialize aws connection url request from JSON",
)
}
return nil
}
type GettableAWSConnectionUrl struct {
AccountId string `json:"account_id"`
ConnectionUrl string `json:"connection_url"`
}
type GettableAccountStatus struct {
Id string `json:"id"`
CloudAccountId *string `json:"cloud_account_id,omitempty"`
Status AccountStatus `json:"status"`
}
type PostableAgentCheckInPayload struct {
ID string `json:"account_id"`
AccountID string `json:"cloud_account_id"`
// Arbitrary cloud-specific agent data
Data map[string]any `json:"data,omitempty"`
OrgID string `json:"-"`
}
type GettableAWSAgentCheckIn struct {
AccountId string `json:"account_id"`
CloudAccountId string `json:"cloud_account_id"`
RemovedAt *time.Time `json:"removed_at"`
IntegrationConfig AWSAgentIntegrationConfig `json:"integration_config"`
}
type AWSAgentIntegrationConfig struct {
EnabledRegions []string `json:"enabled_regions"`
TelemetryCollectionStrategy *AWSCollectionStrategy `json:"telemetry,omitempty"`
}
type PatchableServiceConfig struct {
OrgID string `json:"org_id"`
ServiceId string `json:"service_id"`
Config []byte `json:"config"` // json serialized config
}
type PatchableAWSCloudServiceConfig struct {
CloudAccountId string `json:"cloud_account_id"`
Config *AWSCloudServiceConfig `json:"config"`
}
type AWSCloudServiceConfig struct {
Logs *AWSCloudServiceLogsConfig `json:"logs,omitempty"`
Metrics *AWSCloudServiceMetricsConfig `json:"metrics,omitempty"`
}
// Unmarshal unmarshals data from src
func (c *PatchableAWSCloudServiceConfig) Unmarshal(src []byte) error {
err := json.Unmarshal(src, c)
if err != nil {
return errors.WrapInternalf(
err, errors.CodeInternal, "couldn't deserialize aws service config req from JSON",
)
}
return nil
}
// Marshal serializes data to bytes
func (c *PatchableAWSCloudServiceConfig) Marshal() ([]byte, error) {
serialized, err := json.Marshal(c)
if err != nil {
return nil, errors.WrapInternalf(
err, errors.CodeInternal, "couldn't serialize aws service config req to JSON",
)
}
return serialized, nil
}
// Unmarshal unmarshals data from src
func (a *AWSCloudServiceConfig) Unmarshal(src []byte) error {
err := json.Unmarshal(src, a)
if err != nil {
return errors.WrapInternalf(
err, errors.CodeInternal, "couldn't deserialize cloud service config from JSON",
)
}
return nil
}
// Marshal serializes data to bytes
func (a *AWSCloudServiceConfig) Marshal() ([]byte, error) {
serialized, err := json.Marshal(a)
if err != nil {
return nil, errors.WrapInternalf(
err, errors.CodeInternal, "couldn't serialize cloud service config to JSON",
)
}
return serialized, nil
}
func (a *AWSCloudServiceConfig) Validate(def *AWSServiceDefinition) error {
if def.Id != S3Sync && a.Logs != nil && a.Logs.S3Buckets != nil {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "s3 buckets can only be added to service-type[%s]", S3Sync)
} else if def.Id == S3Sync && a.Logs != nil && a.Logs.S3Buckets != nil {
for region := range a.Logs.S3Buckets {
if _, found := ValidAWSRegions[region]; !found {
return errors.NewInvalidInputf(CodeInvalidCloudRegion, "invalid cloud region: %s", region)
}
}
}
return nil
}
type PatchServiceConfigResponse struct {
ServiceId string `json:"id"`
Config any `json:"config"`
}
type PatchableAccountConfig struct {
OrgID string
AccountId string
Data []byte // provider-specific config, e.g. a serialized AWSAccountConfig
}
type PatchableAWSAccountConfig struct {
Config *AWSAccountConfig `json:"config"`
}
func (p *PatchableAWSAccountConfig) Unmarshal(src []byte) error {
err := json.Unmarshal(src, p)
if err != nil {
return errors.WrapInternalf(
err, errors.CodeInternal, "couldn't deserialize patchable account config from JSON",
)
}
return nil
}
func (p *PatchableAWSAccountConfig) Marshal() ([]byte, error) {
if p == nil {
return nil, errors.NewInternalf(errors.CodeInternal, "patchable account config is nil")
}
serialized, err := json.Marshal(p)
if err != nil {
return nil, errors.WrapInternalf(
err, errors.CodeInternal, "couldn't serialize patchable account config to JSON",
)
}
return serialized, nil
}
type AWSAccountConfig struct {
EnabledRegions []string `json:"regions"`
}
// Unmarshal unmarshals data from src
func (c *AWSAccountConfig) Unmarshal(src []byte) error {
err := json.Unmarshal(src, c)
if err != nil {
return errors.WrapInternalf(
err, errors.CodeInternal, "couldn't deserialize AWS account config from JSON",
)
}
return nil
}
// Marshal serializes data to bytes
func (c *AWSAccountConfig) Marshal() ([]byte, error) {
if c == nil {
return nil, errors.NewInternalf(errors.CodeInternal, "cloud account config is nil")
}
serialized, err := json.Marshal(c)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't serialize cloud account config to JSON")
}
return serialized, nil
}
type GettableAWSServices struct {
Services []AWSServiceSummary `json:"services"`
}
type GetServiceDetailsReq struct {
OrgID valuer.UUID
ServiceId string
CloudAccountID *string
}
// --------------------------------------------------------------------------
// DATABASE TYPES
// --------------------------------------------------------------------------
// --------------------------------------------------------------------------
// Cloud integration uses the cloud_integration table
// and cloud_integrations_service table
// --------------------------------------------------------------------------
type CloudIntegration struct {
bun.BaseModel `bun:"table:cloud_integration"`
types.Identifiable
types.TimeAuditable
Provider string `json:"provider" bun:"provider,type:text,unique:provider_id"`
Config string `json:"config" bun:"config,type:text"` // json serialized config
AccountID *string `json:"account_id" bun:"account_id,type:text"`
LastAgentReport *AgentReport `json:"last_agent_report" bun:"last_agent_report,type:text"`
RemovedAt *time.Time `json:"removed_at" bun:"removed_at,type:timestamp,nullzero"`
OrgID string `bun:"org_id,type:text,unique:provider_id"`
}
func (a *CloudIntegration) Status() AccountStatus {
status := AccountStatus{}
if a.LastAgentReport != nil {
lastHeartbeat := a.LastAgentReport.TimestampMillis
status.Integration.LastHeartbeatTsMillis = &lastHeartbeat
}
return status
}
func (a *CloudIntegration) Account(cloudProvider CloudProviderType) *Account {
ca := &Account{Id: a.ID.StringValue(), Status: a.Status()}
if a.AccountID != nil {
ca.CloudAccountId = *a.AccountID
}
ca.Config = map[string]interface{}{}
if len(a.Config) < 1 {
return ca
}
switch cloudProvider {
case CloudProviderAWS:
config := new(AWSAccountConfig)
_ = config.Unmarshal([]byte(a.Config)) // error deliberately ignored: a malformed stored config falls back to the zero-value config
ca.Config = config
default:
}
return ca
}
type Account struct {
Id string `json:"id"`
CloudAccountId string `json:"cloud_account_id"`
Config any `json:"config"` // provider-specific config, e.g. AWSAccountConfig
Status AccountStatus `json:"status"`
}
type AccountStatus struct {
Integration AccountIntegrationStatus `json:"integration"`
}
type AccountIntegrationStatus struct {
LastHeartbeatTsMillis *int64 `json:"last_heartbeat_ts_ms"`
}
func DefaultAWSAccountConfig() AWSAccountConfig {
return AWSAccountConfig{
EnabledRegions: []string{},
}
}
type AWSServiceSummary struct {
DefinitionMetadata
Config *AWSCloudServiceConfig `json:"config"`
}
type GettableAWSServiceDetails struct {
AWSServiceDefinition
Config *AWSCloudServiceConfig `json:"config"`
ConnectionStatus *ServiceConnectionStatus `json:"status,omitempty"`
}
type ServiceConnectionStatus struct {
Logs []*SignalConnectionStatus `json:"logs"`
Metrics []*SignalConnectionStatus `json:"metrics"`
}
type SignalConnectionStatus struct {
CategoryID string `json:"category"`
CategoryDisplayName string `json:"category_display_name"`
LastReceivedTsMillis int64 `json:"last_received_ts_ms"` // epoch milliseconds
LastReceivedFrom string `json:"last_received_from"` // resource identifier
}
type AgentReport struct {
TimestampMillis int64 `json:"timestamp_millis"`
Data map[string]any `json:"data"`
}
// Scan scans data from db
func (r *AgentReport) Scan(src any) error {
var data []byte
switch v := src.(type) {
case []byte:
data = v
case string:
data = []byte(v)
default:
return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
}
return json.Unmarshal(data, r)
}
// Value serializes data to bytes for db insertion
func (r *AgentReport) Value() (driver.Value, error) {
if r == nil {
return nil, errors.NewInternalf(errors.CodeInternal, "agent report is nil")
}
serialized, err := json.Marshal(r)
if err != nil {
return nil, errors.WrapInternalf(
err, errors.CodeInternal, "couldn't serialize agent report to JSON",
)
}
return string(serialized), nil
}
type CloudIntegrationService struct {
bun.BaseModel `bun:"table:cloud_integration_service,alias:cis"`
types.Identifiable
types.TimeAuditable
Type string `bun:"type,type:text,notnull,unique:cloud_integration_id_type"`
Config string `bun:"config,type:text"` // json serialized config
CloudIntegrationID string `bun:"cloud_integration_id,type:text,notnull,unique:cloud_integration_id_type,references:cloud_integrations(id),on_delete:cascade"`
}
type AWSCloudServiceLogsConfig struct {
Enabled bool `json:"enabled"`
S3Buckets map[string][]string `json:"s3_buckets,omitempty"`
}
type AWSCloudServiceMetricsConfig struct {
Enabled bool `json:"enabled"`
}
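The Scan/Value pair on AgentReport (and on the other config types) is the standard database/sql round-trip: bun calls Value when writing and Scan when reading, and Value returns string rather than []byte so PostgreSQL stores text instead of bytea. A sketch of the round-trip in isolation, assuming it sits alongside this package; the payload is made up:

package integrationstypes

import "testing"

// TestAgentReportRoundTrip is a sketch, not part of the diff: serialize
// with Value, then restore the struct with Scan.
func TestAgentReportRoundTrip(t *testing.T) {
	in := &AgentReport{TimestampMillis: 1700000000000, Data: map[string]any{"agent": "ok"}}
	v, err := in.Value() // driver.Value, returned as a string
	if err != nil {
		t.Fatal(err)
	}
	out := &AgentReport{}
	if err := out.Scan(v); err != nil {
		t.Fatal(err)
	}
	if out.TimestampMillis != in.TimestampMillis {
		t.Fatalf("round-trip lost timestamp: got %d", out.TimestampMillis)
	}
}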

View File

@@ -0,0 +1,183 @@
package integrationstypes
import (
"fmt"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
const (
S3Sync = "s3sync"
)
type DefinitionMetadata struct {
Id string `json:"id"`
Title string `json:"title"`
Icon string `json:"icon"`
}
type Definition interface {
GetId() string
Validate() error
PopulateDashboardURLs(svcId string)
}
var _ Definition = &AWSServiceDefinition{}
type AWSServiceDefinition struct {
DefinitionMetadata
Overview string `json:"overview"` // markdown
Assets Assets `json:"assets"`
SupportedSignals SupportedSignals `json:"supported_signals"`
DataCollected DataCollected `json:"data_collected"`
Strategy *AWSCollectionStrategy `json:"telemetry_collection_strategy"`
IngestionStatusCheck *IngestionStatusCheck `json:"ingestion_status_check"`
}
type IngestionStatusCheck struct {
Metrics []*IngestionStatusCheckCategory `json:"metrics"`
Logs []*IngestionStatusCheckCategory `json:"logs"`
}
type IngestionStatusCheckCategory struct {
Category string `json:"category"`
DisplayName string `json:"display_name"`
Checks []*IngestionStatusCheckAttribute `json:"checks"`
}
type IngestionStatusCheckAttribute struct {
Key string `json:"key"` // search key (metric name or log message)
Attributes []*IngestionStatusCheckAttributeFilter `json:"attributes"`
}
type IngestionStatusCheckAttributeFilter struct {
Name string `json:"name"`
Operator string `json:"operator"`
Value string `json:"value"`
}
func (def *AWSServiceDefinition) GetId() string {
return def.Id
}
func (def *AWSServiceDefinition) Validate() error {
seenDashboardIds := map[string]interface{}{}
if def.Strategy == nil {
return errors.NewInternalf(errors.CodeInternal, "telemetry_collection_strategy is required")
}
for _, dd := range def.Assets.Dashboards {
if _, seen := seenDashboardIds[dd.Id]; seen {
return errors.NewInternalf(errors.CodeInternal, "multiple dashboards found with id %s for AWS Integration", dd.Id)
}
seenDashboardIds[dd.Id] = nil
}
return nil
}
func (def *AWSServiceDefinition) PopulateDashboardURLs(serviceId string) {
for i := range def.Assets.Dashboards {
dashboardId := def.Assets.Dashboards[i].Id
url := "/dashboard/" + GetCloudIntegrationDashboardID(CloudProviderAWS, serviceId, dashboardId)
def.Assets.Dashboards[i].Url = url
}
}
type Assets struct {
Dashboards []Dashboard `json:"dashboards"`
}
type SupportedSignals struct {
Logs bool `json:"logs"`
Metrics bool `json:"metrics"`
}
type DataCollected struct {
Logs []CollectedLogAttribute `json:"logs"`
Metrics []CollectedMetric `json:"metrics"`
}
type CollectedLogAttribute struct {
Name string `json:"name"`
Path string `json:"path"`
Type string `json:"type"`
}
type CollectedMetric struct {
Name string `json:"name"`
Type string `json:"type"`
Unit string `json:"unit"`
Description string `json:"description"`
}
type AWSCollectionStrategy struct {
Provider valuer.String `json:"provider"`
AWSMetrics *AWSMetricsStrategy `json:"aws_metrics,omitempty"`
AWSLogs *AWSLogsStrategy `json:"aws_logs,omitempty"`
S3Buckets map[string][]string `json:"s3_buckets,omitempty"` // Only available for the S3 Sync service type
}
type AWSMetricsStrategy struct {
// to be used as the IncludeFilters property described at https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-metricstream.html#cfn-cloudwatch-metricstream-includefilters
StreamFilters []struct {
// json tags here are in the shape expected by AWS API as detailed at
// https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-metricstream-metricstreamfilter.html
Namespace string `json:"Namespace"`
MetricNames []string `json:"MetricNames,omitempty"`
} `json:"cloudwatch_metric_stream_filters"`
}
type AWSLogsStrategy struct {
Subscriptions []struct {
// subscribe to all log groups with the specified prefix.
// eg: `/aws/rds/`
LogGroupNamePrefix string `json:"log_group_name_prefix"`
// https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/FilterAndPatternSyntax.html
// "" implies no filtering is required.
FilterPattern string `json:"filter_pattern"`
} `json:"cloudwatch_logs_subscriptions"`
}
type Dashboard struct {
Id string `json:"id"`
Url string `json:"url"`
Title string `json:"title"`
Description string `json:"description"`
Image string `json:"image"`
Definition *dashboardtypes.StorableDashboardData `json:"definition,omitempty"`
}
func GetCloudIntegrationDashboardID(cloudProvider valuer.String, svcId, dashboardId string) string {
return fmt.Sprintf("cloud-integration--%s--%s--%s", cloudProvider, svcId, dashboardId)
}
func GetDashboardsFromAssets(svcId string, cloudProvider CloudProviderType, createdAt *time.Time, assets Assets) []*dashboardtypes.Dashboard {
dashboards := make([]*dashboardtypes.Dashboard, 0)
for _, d := range assets.Dashboards {
author := fmt.Sprintf("%s-integration", cloudProvider)
dashboards = append(dashboards, &dashboardtypes.Dashboard{
ID: GetCloudIntegrationDashboardID(cloudProvider, svcId, d.Id),
Locked: true,
Data: *d.Definition,
TimeAuditable: types.TimeAuditable{
CreatedAt: *createdAt,
UpdatedAt: *createdAt,
},
UserAuditable: types.UserAuditable{
CreatedBy: author,
UpdatedBy: author,
},
})
}
return dashboards
}
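GetCloudIntegrationDashboardID composes the "--"-separated ID that PopulateDashboardURLs embeds in URLs and that the helpers in the next file split back apart. A quick round-trip sketch; the service and dashboard IDs are made up, and %s rendering valuer.String as its plain value ("aws") is an assumption:

package integrationstypes

import "fmt"

// Sketch, not part of the diff: compose an ID and recognize it again.
func ExampleGetCloudIntegrationDashboardID() {
	id := GetCloudIntegrationDashboardID(CloudProviderAWS, "rds", "overview")
	fmt.Println(id)                                  // cloud-integration--aws--rds--overview
	fmt.Println(IsCloudIntegrationDashboardUuid(id)) // true
}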

View File

@@ -1,4 +1,4 @@
package cloudintegrations
package integrationstypes
import (
"github.com/SigNoz/signoz/pkg/errors"

View File

@@ -0,0 +1,106 @@
package integrationstypes
import (
"database/sql/driver"
"encoding/json"
"strings"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/uptrace/bun"
)
// CloudProviderType type alias
type CloudProviderType = valuer.String
var (
CloudProviderAWS = valuer.NewString("aws")
)
var (
CodeCloudProviderInvalidInput = errors.MustNewCode("invalid_cloud_provider")
)
func NewCloudProvider(provider string) (CloudProviderType, error) {
switch provider {
case CloudProviderAWS.String():
return valuer.NewString(provider), nil
default:
return CloudProviderType{}, errors.NewInvalidInputf(CodeCloudProviderInvalidInput, "invalid cloud provider: %s", provider)
}
}
var (
AWSIntegrationUserEmail = valuer.MustNewEmail("aws-integration@signoz.io")
)
var IntegrationUserEmails = []valuer.Email{
AWSIntegrationUserEmail,
}
func IsCloudIntegrationDashboardUuid(dashboardUuid string) bool {
parts := strings.SplitN(dashboardUuid, "--", 4)
if len(parts) != 4 {
return false
}
return parts[0] == "cloud-integration"
}
func GetCloudProviderFromDashboardID(dashboardUuid string) (CloudProviderType, error) {
parts := strings.SplitN(dashboardUuid, "--", 4)
if len(parts) != 4 {
return valuer.String{}, errors.NewInvalidInputf(CodeCloudProviderInvalidInput, "invalid dashboard uuid: %s", dashboardUuid)
}
providerStr := parts[1]
cloudProvider, err := NewCloudProvider(providerStr)
if err != nil {
return CloudProviderType{}, err
}
return cloudProvider, nil
}
// --------------------------------------------------------------------------
// Normal integration uses just the installed_integration table
// --------------------------------------------------------------------------
type InstalledIntegration struct {
bun.BaseModel `bun:"table:installed_integration"`
types.Identifiable
Type string `json:"type" bun:"type,type:text,unique:org_id_type"`
Config InstalledIntegrationConfig `json:"config" bun:"config,type:text"`
InstalledAt time.Time `json:"installed_at" bun:"installed_at,default:current_timestamp"`
OrgID string `json:"org_id" bun:"org_id,type:text,unique:org_id_type,references:organizations(id),on_delete:cascade"`
}
type InstalledIntegrationConfig map[string]interface{}
// Scan scans data from db
func (c *InstalledIntegrationConfig) Scan(src interface{}) error {
var data []byte
switch v := src.(type) {
case []byte:
data = v
case string:
data = []byte(v)
default:
return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
}
return json.Unmarshal(data, c)
}
// Value serializes data to db
func (c *InstalledIntegrationConfig) Value() (driver.Value, error) {
filterSetJson, err := json.Marshal(c)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "could not serialize integration config to JSON")
}
return filterSetJson, nil
}
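NewCloudProvider is the single gate for provider strings, so anything other than "aws" surfaces CodeCloudProviderInvalidInput — the same error text the integration test below asserts on for "gcp". A quick sketch, assuming it lives in this package:

package integrationstypes

import "fmt"

// Sketch, not part of the diff: a valid and an invalid provider string.
func ExampleNewCloudProvider() {
	p, _ := NewCloudProvider("aws")
	fmt.Println(p.String()) // aws
	_, err := NewCloudProvider("gcp")
	fmt.Println(err != nil) // true: "invalid cloud provider: gcp"
}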

View File

@@ -265,46 +265,6 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
ruleUnit: "",
shouldAlert: true,
},
// bytes and Gibibytes,
// rule will only fire if target is converted to bytes so that the sample value becomes lower than the target 100GiBy
{
name: "bytes to Gibibytes - should alert",
threshold: BasicRuleThreshold{
Name: CriticalThresholdName,
TargetValue: &target, // 100 Gibibytes
TargetUnit: "GiBy",
MatchType: AtleastOnce,
CompareOp: ValueIsBelow,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 70 * 1024 * 1024 * 1024, Timestamp: 1000}, // 70 Gibibytes
},
},
ruleUnit: "bytes",
shouldAlert: true,
},
// data Rate conversion - bytes per second to MiB per second
// rule will only fire if target is converted to bytes so that the sample value becomes lower than the target 100 MiB/s
{
name: "bytes per second to MiB per second - should alert",
threshold: BasicRuleThreshold{
Name: CriticalThresholdName,
TargetValue: &target, // 100 MiB/s
TargetUnit: "MiBy/s",
MatchType: AtleastOnce,
CompareOp: ValueIsBelow,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 30 * 1024 * 1024, Timestamp: 1000}, // 30 MiB/s
},
},
ruleUnit: "By/s",
shouldAlert: true,
},
}
for _, tt := range tests {

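The removed cases documented how unit conversion drives evaluation: with rule unit "bytes" and target unit "GiBy", the 100 GiB target normalizes to bytes before comparison, and a 70 GiB sample sits below it, so ValueIsBelow fires. The arithmetic in isolation:

package main

import "fmt"

func main() {
	const gib = 1 << 30          // bytes per GiB
	target := 100.0 * gib        // 107,374,182,400 bytes (100 GiBy target)
	sample := 70.0 * gib         // 75,161,927,680 bytes (series point)
	fmt.Println(sample < target) // true -> shouldAlert for ValueIsBelow
}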
View File

@@ -139,5 +139,5 @@ def test_generate_connection_url_unsupported_provider(
response_data = response.json()
assert "error" in response_data, "Response should contain 'error' field"
assert (
"unsupported cloud provider" in response_data["error"].lower()
"invalid cloud provider: gcp" in response_data["error"]["message"]
), "Error message should indicate unsupported provider"

View File

@@ -193,7 +193,6 @@ def test_get_service_details_with_account(
assert "overview" in data, "Service details should have 'overview' field"
assert "assets" in data, "Service details should have 'assets' field"
assert "config" in data, "Service details should have 'config' field"
assert "status" in data, "Config should have 'status' field"
@@ -347,8 +346,8 @@ def test_update_service_config_without_account(
)
assert (
response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR
), f"Expected 500 for non-existent account, got {response.status_code}"
response.status_code == HTTPStatus.NOT_FOUND
), f"Expected 404 for non-existent account, got {response.status_code}"