Mirror of https://github.com/SigNoz/signoz.git (synced 2026-02-16 22:22:14 +00:00)

Compare commits: fix/root-u ... feat/azure (33 commits)
Commits:
- 73453ec8c6
- b333e44d13
- 5bd51df854
- 989ca522f8
- 9a2e9d76b5
- 2be42deecd
- 95cad880cc
- cfef1091b3
- 4504c364f2
- 1a006870e1
- e7a27a1cfb
- 1e7323ead2
- af4c6c5b52
- 02262ba245
- df7c9e1339
- ac5e52479f
- de56477bbb
- fddd8a27fa
- 2aa4f8e237
- 74006a214b
- ed2cbacadc
- 3cbd529843
- 78b481e895
- 215098ec0d
- 5a4ef2e4ce
- b1f33c4f7f
- 713c84b1e4
- c3daf9e428
- 70a908deb1
- cc9cdded3c
- 77067cd614
- ab703d9a65
- 611e8fbf9e
@@ -1,4 +1,4 @@
FROM node:22-bookworm AS build
FROM node:18-bullseye AS build

WORKDIR /opt/
COPY ./frontend/ ./
@@ -309,14 +309,3 @@ user:
allow_self: true
# The duration within which a user can reset their password.
max_token_lifetime: 6h
root:
# Whether to enable the root user. When enabled, a root user is provisioned
# on startup using the email and password below. The root user cannot be
# deleted, updated, or have their password changed through the UI.
enabled: false
# The email address of the root user.
email: ""
# The password of the root user. Must meet password requirements.
password: ""
# The name of the organization to create or look up for the root user.
org_name: default
@@ -4678,8 +4678,6 @@ components:
type: string
id:
type: string
isRoot:
type: boolean
orgId:
type: string
role:
@@ -1,6 +1,7 @@
package api

import (
"log/slog"
"net/http"
"net/http/httputil"
"time"
@@ -13,7 +14,6 @@ import (
"github.com/SigNoz/signoz/pkg/http/middleware"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
@@ -21,6 +21,7 @@ import (
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/version"
"github.com/gorilla/mux"
)

@@ -30,13 +31,14 @@ type APIHandlerOptions struct {
RulesManager *rules.Manager
UsageManager *usage.Manager
IntegrationsController *integrations.Controller
CloudIntegrationsController *cloudintegrations.Controller
CloudIntegrationsRegistry map[integrationstypes.CloudProviderType]integrationstypes.CloudProvider
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
Gateway *httputil.ReverseProxy
GatewayUrl string
// Querier Influx Interval
FluxInterval time.Duration
GlobalConfig global.Config
Logger *slog.Logger // this is present in Signoz.Instrumentation but adding for quick access
}

type APIHandler struct {
@@ -45,12 +47,12 @@ type APIHandler struct {
}

// NewAPIHandler returns an APIHandler
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz, config signoz.Config) (*APIHandler, error) {
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
Reader: opts.DataConnector,
RuleManager: opts.RulesManager,
IntegrationsController: opts.IntegrationsController,
CloudIntegrationsController: opts.CloudIntegrationsController,
CloudIntegrationsRegistry: opts.CloudIntegrationsRegistry,
LogsParsingPipelineController: opts.LogsParsingPipelineController,
FluxInterval: opts.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
@@ -58,7 +60,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz, config signoz.
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
}, config)
})

if err != nil {
return nil, err
@@ -118,14 +120,12 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
}

func (ah *APIHandler) RegisterCloudIntegrationsRoutes(router *mux.Router, am *middleware.AuthZ) {
ah.APIHandler.RegisterCloudIntegrationsRoutes(router, am)

router.HandleFunc(
"/api/v1/cloud-integrations/{cloudProvider}/accounts/generate-connection-params",
am.EditAccess(ah.CloudIntegrationsGenerateConnectionParams),
).Methods(http.MethodGet)
}

func (ah *APIHandler) getVersion(w http.ResponseWriter, r *http.Request) {
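The hunk above registers the generate-connection-params route on a gorilla/mux router with a method restriction and an access-control wrapper. Below is a minimal, self-contained sketch of the same route-registration pattern; it is illustrative only, assumes no auth middleware, and is not the SigNoz handler itself.

```go
package main

import (
	"fmt"
	"net/http"

	"github.com/gorilla/mux"
)

func main() {
	router := mux.NewRouter()

	// Register a GET-only route with a path variable, mirroring the
	// cloud-integrations route added in the diff above.
	router.HandleFunc(
		"/api/v1/cloud-integrations/{cloudProvider}/accounts/generate-connection-params",
		func(w http.ResponseWriter, r *http.Request) {
			// mux.Vars extracts the {cloudProvider} segment from the URL.
			cloudProvider := mux.Vars(r)["cloudProvider"]
			fmt.Fprintf(w, "connection params requested for %s\n", cloudProvider)
		},
	).Methods(http.MethodGet)

	_ = http.ListenAndServe(":8080", router)
}
```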
@@ -6,6 +6,7 @@ import (
"encoding/json"
"fmt"
"io"
"log/slog"
"net/http"
"strings"
"time"
@@ -13,20 +14,14 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/user"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/gorilla/mux"
"go.uber.org/zap"
)

type CloudIntegrationConnectionParamsResponse struct {
IngestionUrl string `json:"ingestion_url,omitempty"`
IngestionKey string `json:"ingestion_key,omitempty"`
SigNozAPIUrl string `json:"signoz_api_url,omitempty"`
SigNozAPIKey string `json:"signoz_api_key,omitempty"`
}
// TODO: move this file with other cloud integration related code

func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseWriter, r *http.Request) {
claims, err := authtypes.ClaimsFromContext(r.Context())
@@ -41,23 +36,21 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
return
}

cloudProvider := mux.Vars(r)["cloudProvider"]
if cloudProvider != "aws" {
RespondError(w, basemodel.BadRequest(fmt.Errorf(
"cloud provider not supported: %s", cloudProvider,
)), nil)
cloudProviderString := mux.Vars(r)["cloudProvider"]

cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
if err != nil {
render.Error(w, err)
return
}

apiKey, apiErr := ah.getOrCreateCloudIntegrationPAT(r.Context(), claims.OrgID, cloudProvider)
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't provision PAT for cloud integration:",
), nil)
apiKey, err := ah.getOrCreateCloudIntegrationPAT(r.Context(), claims.OrgID, cloudProvider)
if err != nil {
render.Error(w, err)
return
}

result := CloudIntegrationConnectionParamsResponse{
result := integrationstypes.GettableCloudIntegrationConnectionParams{
SigNozAPIKey: apiKey,
}

@@ -71,16 +64,17 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
// Return the API Key (PAT) even if the rest of the params can not be deduced.
// Params not returned from here will be requested from the user via form inputs.
// This enables gracefully degraded but working experience even for non-cloud deployments.
zap.L().Info("ingestion params and signoz api url can not be deduced since no license was found")
ah.Respond(w, result)
ah.opts.Logger.InfoContext(
r.Context(),
"ingestion params and signoz api url can not be deduced since no license was found",
)
render.Success(w, http.StatusOK, result)
return
}

signozApiUrl, apiErr := ah.getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't deduce ingestion url and signoz api url",
), nil)
signozApiUrl, err := ah.getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
if err != nil {
render.Error(w, err)
return
}

@@ -89,48 +83,41 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW

gatewayUrl := ah.opts.GatewayUrl
if len(gatewayUrl) > 0 {

ingestionKey, apiErr := getOrCreateCloudProviderIngestionKey(
ingestionKeyString, err := ah.getOrCreateCloudProviderIngestionKey(
r.Context(), gatewayUrl, license.Key, cloudProvider,
)
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't get or create ingestion key",
), nil)
if err != nil {
render.Error(w, err)
return
}

result.IngestionKey = ingestionKey

result.IngestionKey = ingestionKeyString
} else {
zap.L().Info("ingestion key can't be deduced since no gateway url has been configured")
ah.opts.Logger.InfoContext(
r.Context(),
"ingestion key can't be deduced since no gateway url has been configured",
)
}

ah.Respond(w, result)
render.Success(w, http.StatusOK, result)
}

func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId string, cloudProvider string) (
string, *basemodel.ApiError,
) {
func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId string, cloudProvider valuer.String) (string, error) {
integrationPATName := fmt.Sprintf("%s integration", cloudProvider)

integrationUser, apiErr := ah.getOrCreateCloudIntegrationUser(ctx, orgId, cloudProvider)
if apiErr != nil {
return "", apiErr
integrationUser, err := ah.getOrCreateCloudIntegrationUser(ctx, orgId, cloudProvider)
if err != nil {
return "", err
}

orgIdUUID, err := valuer.NewUUID(orgId)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't parse orgId: %w", err,
))
return "", err
}

allPats, err := ah.Signoz.Modules.User.ListAPIKeys(ctx, orgIdUUID)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't list PATs: %w", err,
))
return "", err
}
for _, p := range allPats {
if p.UserID == integrationUser.ID && p.Name == integrationPATName {
@@ -138,9 +125,10 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
}
}

zap.L().Info(
ah.opts.Logger.InfoContext(
ctx,
"no PAT found for cloud integration, creating a new one",
zap.String("cloudProvider", cloudProvider),
slog.String("cloudProvider", cloudProvider.String()),
)

newPAT, err := types.NewStorableAPIKey(
@@ -150,68 +138,48 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
0,
)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't create cloud integration PAT: %w", err,
))
return "", err
}

err = ah.Signoz.Modules.User.CreateAPIKey(ctx, newPAT)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't create cloud integration PAT: %w", err,
))
return "", err
}
return newPAT.Token, nil
}

func (ah *APIHandler) getOrCreateCloudIntegrationUser(
ctx context.Context, orgId string, cloudProvider string,
) (*types.User, *basemodel.ApiError) {
cloudIntegrationUserName := fmt.Sprintf("%s-integration", cloudProvider)
// TODO: move this function out of handler and use proper module structure
func (ah *APIHandler) getOrCreateCloudIntegrationUser(ctx context.Context, orgId string, cloudProvider valuer.String) (*types.User, error) {
cloudIntegrationUserName := fmt.Sprintf("%s-integration", cloudProvider.String())
email := valuer.MustNewEmail(fmt.Sprintf("%s@signoz.io", cloudIntegrationUserName))

cloudIntegrationUser, err := types.NewUser(cloudIntegrationUserName, email, types.RoleViewer, valuer.MustNewUUID(orgId))
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration user: %w", err))
return nil, err
}

password := types.MustGenerateFactorPassword(cloudIntegrationUser.ID.StringValue())

cloudIntegrationUser, err = ah.Signoz.Modules.User.GetOrCreateUser(ctx, cloudIntegrationUser, user.WithFactorPassword(password))
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("couldn't look for integration user: %w", err))
return nil, err
}

return cloudIntegrationUser, nil
}

func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
string, *basemodel.ApiError,
) {
// TODO: remove this struct from here
type deploymentResponse struct {
Name string `json:"name"`
ClusterInfo struct {
Region struct {
DNS string `json:"dns"`
} `json:"region"`
} `json:"cluster"`
}

// TODO: move this function out of handler and use proper module structure
func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (string, error) {
respBytes, err := ah.Signoz.Zeus.GetDeployment(ctx, licenseKey)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't query for deployment info: error: %w", err,
))
return "", errors.WrapInternalf(err, errors.CodeInternal, "couldn't query for deployment info: error")
}

resp := new(deploymentResponse)
resp := new(integrationstypes.GettableDeployment)

err = json.Unmarshal(respBytes, resp)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't unmarshal deployment info response: error: %w", err,
))
return "", errors.WrapInternalf(err, errors.CodeInternal, "couldn't unmarshal deployment info response")
}

regionDns := resp.ClusterInfo.Region.DNS
@@ -219,9 +187,11 @@ func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licens

if len(regionDns) < 1 || len(deploymentName) < 1 {
// Fail early if actual response structure and expectation here ever diverge
return "", basemodel.InternalError(fmt.Errorf(
return "", errors.WrapInternalf(
err,
errors.CodeInternal,
"deployment info response not in expected shape. couldn't determine region dns and deployment name",
))
)
}

signozApiUrl := fmt.Sprintf("https://%s.%s", deploymentName, regionDns)
@@ -229,102 +199,85 @@ func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licens
return signozApiUrl, nil
}

type ingestionKey struct {
Name string `json:"name"`
Value string `json:"value"`
// other attributes from gateway response not included here since they are not being used.
}

type ingestionKeysSearchResponse struct {
Status string `json:"status"`
Data []ingestionKey `json:"data"`
Error string `json:"error"`
}

type createIngestionKeyResponse struct {
Status string `json:"status"`
Data ingestionKey `json:"data"`
Error string `json:"error"`
}

func getOrCreateCloudProviderIngestionKey(
ctx context.Context, gatewayUrl string, licenseKey string, cloudProvider string,
) (string, *basemodel.ApiError) {
func (ah *APIHandler) getOrCreateCloudProviderIngestionKey(
ctx context.Context, gatewayUrl string, licenseKey string, cloudProvider valuer.String,
) (string, error) {
cloudProviderKeyName := fmt.Sprintf("%s-integration", cloudProvider)

// see if the key already exists
searchResult, apiErr := requestGateway[ingestionKeysSearchResponse](
searchResult, err := requestGateway[integrationstypes.GettableIngestionKeysSearch](
ctx,
gatewayUrl,
licenseKey,
fmt.Sprintf("/v1/workspaces/me/keys/search?name=%s", cloudProviderKeyName),
nil,
ah.opts.Logger,
)

if apiErr != nil {
return "", basemodel.WrapApiError(
apiErr, "couldn't search for cloudprovider ingestion key",
)
if err != nil {
return "", err
}

if searchResult.Status != "success" {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't search for cloudprovider ingestion key: status: %s, error: %s",
return "", errors.NewInternalf(
errors.CodeInternal,
"couldn't search for cloud provider ingestion key: status: %s, error: %s",
searchResult.Status, searchResult.Error,
))
}

for _, k := range searchResult.Data {
if k.Name == cloudProviderKeyName {
if len(k.Value) < 1 {
// Fail early if actual response structure and expectation here ever diverge
return "", basemodel.InternalError(fmt.Errorf(
"ingestion keys search response not as expected",
))
}

return k.Value, nil
}
}

zap.L().Info(
"no existing ingestion key found for cloud integration, creating a new one",
zap.String("cloudProvider", cloudProvider),
)
createKeyResult, apiErr := requestGateway[createIngestionKeyResponse](
ctx, gatewayUrl, licenseKey, "/v1/workspaces/me/keys",
map[string]any{
"name": cloudProviderKeyName,
"tags": []string{"integration", cloudProvider},
},
)
if apiErr != nil {
return "", basemodel.WrapApiError(
apiErr, "couldn't create cloudprovider ingestion key",
)
}

for _, k := range searchResult.Data {
if k.Name != cloudProviderKeyName {
continue
}

if len(k.Value) < 1 {
// Fail early if actual response structure and expectation here ever diverge
return "", errors.NewInternalf(errors.CodeInternal, "ingestion keys search response not as expected")
}

return k.Value, nil
}

ah.opts.Logger.InfoContext(
ctx,
"no existing ingestion key found for cloud integration, creating a new one",
slog.String("cloudProvider", cloudProvider.String()),
)

createKeyResult, err := requestGateway[integrationstypes.GettableCreateIngestionKey](
ctx, gatewayUrl, licenseKey, "/v1/workspaces/me/keys",
map[string]any{
"name": cloudProviderKeyName,
"tags": []string{"integration", cloudProvider.String()},
},
ah.opts.Logger,
)
if err != nil {
return "", err
}

if createKeyResult.Status != "success" {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't create cloudprovider ingestion key: status: %s, error: %s",
return "", errors.NewInternalf(
errors.CodeInternal,
"couldn't create cloud provider ingestion key: status: %s, error: %s",
createKeyResult.Status, createKeyResult.Error,
))
)
}

ingestionKey := createKeyResult.Data.Value
if len(ingestionKey) < 1 {
ingestionKeyString := createKeyResult.Data.Value
if len(ingestionKeyString) < 1 {
// Fail early if actual response structure and expectation here ever diverge
return "", basemodel.InternalError(fmt.Errorf(
return "", errors.NewInternalf(errors.CodeInternal,
"ingestion key creation response not as expected",
))
)
}

return ingestionKey, nil
return ingestionKeyString, nil
}
func requestGateway[ResponseType any](
ctx context.Context, gatewayUrl string, licenseKey string, path string, payload any,
) (*ResponseType, *basemodel.ApiError) {
ctx context.Context, gatewayUrl, licenseKey, path string, payload any, logger *slog.Logger,
) (*ResponseType, error) {

baseUrl := strings.TrimSuffix(gatewayUrl, "/")
reqUrl := fmt.Sprintf("%s%s", baseUrl, path)
@@ -335,13 +288,12 @@ func requestGateway[ResponseType any](
"X-Consumer-Groups": "ns:default",
}

return requestAndParseResponse[ResponseType](ctx, reqUrl, headers, payload)
return requestAndParseResponse[ResponseType](ctx, reqUrl, headers, payload, logger)
}

func requestAndParseResponse[ResponseType any](
ctx context.Context, url string, headers map[string]string, payload any,
) (*ResponseType, *basemodel.ApiError) {
ctx context.Context, url string, headers map[string]string, payload any, logger *slog.Logger,
) (*ResponseType, error) {
reqMethod := http.MethodGet
var reqBody io.Reader
if payload != nil {
@@ -349,18 +301,14 @@ func requestAndParseResponse[ResponseType any](

bodyJson, err := json.Marshal(payload)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf(
"couldn't serialize request payload to JSON: %w", err,
))
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't marshal payload")
}
reqBody = bytes.NewBuffer([]byte(bodyJson))
reqBody = bytes.NewBuffer(bodyJson)
}

req, err := http.NewRequestWithContext(ctx, reqMethod, url, reqBody)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf(
"couldn't prepare request: %w", err,
))
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't create req")
}

for k, v := range headers {
@@ -373,23 +321,26 @@ func requestAndParseResponse[ResponseType any](

response, err := client.Do(req)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("couldn't make request: %w", err))
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't make req")
}

defer response.Body.Close()
defer func() {
err = response.Body.Close()
if err != nil {
logger.ErrorContext(ctx, "couldn't close response body", "error", err)
}
}()

respBody, err := io.ReadAll(response.Body)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("couldn't read response: %w", err))
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't read response body")
}

var resp ResponseType

err = json.Unmarshal(respBody, &resp)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf(
"couldn't unmarshal gateway response into %T", resp,
))
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't unmarshal response body")
}

return &resp, nil
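The hunk above turns requestGateway and requestAndParseResponse into generic helpers: marshal an optional JSON payload, send the request, and unmarshal the body into the caller-chosen response type. A minimal standalone sketch of that pattern using only the standard library follows; the names (requestJSON, searchResponse) are illustrative and not part of the SigNoz API.

```go
package main

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"time"
)

// requestJSON sends an optional JSON payload to url and decodes the response
// body into ResponseType. POST is used when a payload is given, GET otherwise.
func requestJSON[ResponseType any](ctx context.Context, url string, headers map[string]string, payload any) (*ResponseType, error) {
	method := http.MethodGet
	var body io.Reader
	if payload != nil {
		method = http.MethodPost
		encoded, err := json.Marshal(payload)
		if err != nil {
			return nil, fmt.Errorf("couldn't marshal payload: %w", err)
		}
		body = bytes.NewBuffer(encoded)
	}

	req, err := http.NewRequestWithContext(ctx, method, url, body)
	if err != nil {
		return nil, fmt.Errorf("couldn't create request: %w", err)
	}
	for k, v := range headers {
		req.Header.Set(k, v)
	}

	client := &http.Client{Timeout: 30 * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return nil, fmt.Errorf("couldn't make request: %w", err)
	}
	defer resp.Body.Close()

	respBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("couldn't read response body: %w", err)
	}

	var parsed ResponseType
	if err := json.Unmarshal(respBody, &parsed); err != nil {
		return nil, fmt.Errorf("couldn't unmarshal response body: %w", err)
	}
	return &parsed, nil
}

type searchResponse struct {
	Status string `json:"status"`
}

func main() {
	// Hypothetical endpoint, used only to show the call shape.
	res, err := requestJSON[searchResponse](context.Background(), "https://example.com/v1/keys/search", map[string]string{"X-Signoz-Cloud-Api-Key": "license-key"}, nil)
	fmt.Println(res, err)
}
```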
@@ -127,12 +127,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
)
}

cloudIntegrationsController, err := cloudintegrations.NewController(signoz.SQLStore)
if err != nil {
return nil, fmt.Errorf(
"couldn't create cloud provider integrations controller: %w", err,
)
}
cloudIntegrationsRegistry := cloudintegrations.NewCloudProviderRegistry(signoz.Instrumentation.Logger(), signoz.SQLStore, signoz.Querier)

// ingestion pipelines manager
logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(
@@ -167,15 +162,16 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
RulesManager: rm,
UsageManager: usageManager,
IntegrationsController: integrationsController,
CloudIntegrationsController: cloudIntegrationsController,
CloudIntegrationsRegistry: cloudIntegrationsRegistry,
LogsParsingPipelineController: logParsingPipelineController,
FluxInterval: config.Querier.FluxInterval,
Gateway: gatewayProxy,
GatewayUrl: config.Gateway.URL.String(),
GlobalConfig: config.Global,
Logger: signoz.Instrumentation.Logger(),
}

apiHandler, err := api.NewAPIHandler(apiOpts, signoz, config)
apiHandler, err := api.NewAPIHandler(apiOpts, signoz)
if err != nil {
return nil, err
}
@@ -1542,10 +1542,6 @@ export interface TypesUserDTO {
* @type string
*/
id?: string;
/**
* @type boolean
*/
isRoot?: boolean;
/**
* @type string
*/
pkg/http/middleware/recovery.go (new file, 44 lines)
@@ -0,0 +1,44 @@
package middleware

import (
"log/slog"
"net/http"
"runtime/debug"

"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
)

// Recovery is a middleware that recovers from panics, logs the panic,
// and returns a 500 Internal Server Error.
type Recovery struct {
logger *slog.Logger
}

// NewRecovery creates a new Recovery middleware.
func NewRecovery(logger *slog.Logger) Wrapper {
return &Recovery{
logger: logger.With("pkg", "http-middleware-recovery"),
}
}

// Wrap is the middleware handler.
func (m *Recovery) Wrap(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
defer func() {
if err := recover(); err != nil {
m.logger.ErrorContext(
r.Context(),
"panic recovered",
"err", err, "stack", string(debug.Stack()),
)

render.Error(w, errors.NewInternalf(
errors.CodeInternal, "internal server error",
))
}
}()

next.ServeHTTP(w, r)
})
}
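The new Recovery middleware wraps a handler with a deferred recover so a panic becomes a logged 500 instead of a crashed connection. A self-contained sketch of the same pattern with only the standard library follows; the render and errors packages from the diff are replaced with http.Error here, so this is illustrative rather than the SigNoz implementation.

```go
package main

import (
	"log/slog"
	"net/http"
	"os"
	"runtime/debug"
)

// recoverMiddleware turns a panic in next into a logged 500 response,
// mirroring the Recovery.Wrap method added in the diff above.
func recoverMiddleware(logger *slog.Logger, next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		defer func() {
			if err := recover(); err != nil {
				logger.ErrorContext(r.Context(), "panic recovered",
					"err", err, "stack", string(debug.Stack()))
				http.Error(w, "internal server error", http.StatusInternalServerError)
			}
		}()
		next.ServeHTTP(w, r)
	})
}

func main() {
	logger := slog.New(slog.NewTextHandler(os.Stderr, nil))
	panicky := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		panic("boom")
	})
	_ = http.ListenAndServe(":8080", recoverMiddleware(logger, panicky))
}
```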
@@ -30,7 +30,3 @@ func (module *getter) ListByOwnedKeyRange(ctx context.Context) ([]*types.Organiz

return module.store.ListByKeyRange(ctx, start, end)
}

func (module *getter) GetByName(ctx context.Context, name string) (*types.Organization, error) {
return module.store.GetByName(ctx, name)
}
@@ -47,22 +47,6 @@ func (store *store) Get(ctx context.Context, id valuer.UUID) (*types.Organizatio
return organization, nil
}

func (store *store) GetByName(ctx context.Context, name string) (*types.Organization, error) {
organization := new(types.Organization)
err := store.
sqlstore.
BunDB().
NewSelect().
Model(organization).
Where("name = ?", name).
Scan(ctx)
if err != nil {
return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrOrganizationNotFound, "organization with name %s does not exist", name)
}

return organization, nil
}

func (store *store) GetAll(ctx context.Context) ([]*types.Organization, error) {
organizations := make([]*types.Organization, 0)
err := store.
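The GetByName method in this hunk is a single-row lookup that maps a "no rows" result to a typed not-found error. A sketch of the same lookup shape with plain database/sql is shown below; it is not the project's sqlstore/bun wrapper, and the table and column names are assumptions for illustration.

```go
package orgstore

import (
	"context"
	"database/sql"
	"errors"
	"fmt"
)

var errOrganizationNotFound = errors.New("organization not found")

type organization struct {
	ID   string
	Name string
}

// getOrganizationByName looks up one organization row and converts
// sql.ErrNoRows into a domain-level not-found error, mirroring the
// WrapNotFoundErrf call in the diff above.
func getOrganizationByName(ctx context.Context, db *sql.DB, name string) (*organization, error) {
	org := &organization{}
	row := db.QueryRowContext(ctx, "SELECT id, name FROM organizations WHERE name = ?", name)
	if err := row.Scan(&org.ID, &org.Name); err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return nil, fmt.Errorf("%w: organization with name %s does not exist", errOrganizationNotFound, name)
		}
		return nil, err
	}
	return org, nil
}
```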
@@ -14,9 +14,6 @@ type Getter interface {

// ListByOwnedKeyRange gets all the organizations owned by the instance
ListByOwnedKeyRange(context.Context) ([]*types.Organization, error)

// Gets the organization by name
GetByName(context.Context, string) (*types.Organization, error)
}

type Setter interface {

@@ -151,10 +151,6 @@ func (module *module) CreateCallbackAuthNSession(ctx context.Context, authNProvi
return "", err
}

if err := user.ErrIfRoot(); err != nil {
return "", errors.WithAdditionalf(err, "root user can only authenticate via password")
}

token, err := module.tokenizer.CreateToken(ctx, authtypes.NewIdentity(user.ID, user.OrgID, user.Email, user.Role), map[string]string{})
if err != nil {
return "", err
@@ -5,22 +5,11 @@ import (

"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
)

type Config struct {
Password PasswordConfig `mapstructure:"password"`
Root RootConfig `mapstructure:"root"`
}

type RootConfig struct {
Enabled bool `mapstructure:"enabled"`
Email valuer.Email `mapstructure:"email"`
Password string `mapstructure:"password"`
OrgName string `mapstructure:"org_name"`
}

type PasswordConfig struct {
Reset ResetConfig `mapstructure:"reset"`
}
@@ -42,10 +31,6 @@ func newConfig() factory.Config {
MaxTokenLifetime: 6 * time.Hour,
},
},
Root: RootConfig{
Enabled: false,
OrgName: "default",
},
}
}

@@ -54,17 +39,5 @@ func (c Config) Validate() error {
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::password::reset::max_token_lifetime must be positive")
}

if c.Root.Enabled {
if c.Root.Email.IsZero() {
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::root::email is required when root user is enabled")
}
if c.Root.Password == "" {
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::root::password is required when root user is enabled")
}
if !types.IsPasswordValid(c.Root.Password) {
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::root::password does not meet password requirements")
}
}

return nil
}
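The Validate method in this hunk only enforces the root-user fields when Root.Enabled is set. A compact, standalone sketch of that validation shape follows; types.IsPasswordValid belongs to the SigNoz codebase, so a simple length check stands in for it here and the struct below is an illustration, not the real config type.

```go
package userconfig

import "errors"

type RootConfig struct {
	Enabled  bool   `mapstructure:"enabled"`
	Email    string `mapstructure:"email"`
	Password string `mapstructure:"password"`
	OrgName  string `mapstructure:"org_name"`
}

// Validate mirrors the checks in the diff: when the root user is enabled,
// email and a sufficiently strong password become mandatory.
func (c RootConfig) Validate() error {
	if !c.Enabled {
		return nil
	}
	if c.Email == "" {
		return errors.New("user::root::email is required when root user is enabled")
	}
	if c.Password == "" {
		return errors.New("user::root::password is required when root user is enabled")
	}
	if len(c.Password) < 8 { // stand-in for types.IsPasswordValid
		return errors.New("user::root::password does not meet password requirements")
	}
	return nil
}
```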
@@ -16,10 +16,6 @@ func NewGetter(store types.UserStore) user.Getter {
return &getter{store: store}
}

func (module *getter) GetRootUserByOrgID(ctx context.Context, orgID valuer.UUID) (*types.User, error) {
return module.store.GetRootUserByOrgID(ctx, orgID)
}

func (module *getter) ListByOrgID(ctx context.Context, orgID valuer.UUID) ([]*types.User, error) {
users, err := module.store.ListUsersByOrgID(ctx, orgID)
if err != nil {
@@ -13,6 +13,7 @@ import (
root "github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/gorilla/mux"
)
@@ -462,7 +463,7 @@ func (h *handler) UpdateAPIKey(w http.ResponseWriter, r *http.Request) {
return
}

if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email.String())) {
if slices.Contains(integrationstypes.IntegrationUserEmails, createdByUser.Email) {
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "API Keys for integration users cannot be revoked"))
return
}
@@ -507,7 +508,7 @@ func (h *handler) RevokeAPIKey(w http.ResponseWriter, r *http.Request) {
return
}

if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email.String())) {
if slices.Contains(integrationstypes.IntegrationUserEmails, createdByUser.Email) {
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "API Keys for integration users cannot be revoked"))
return
}
@@ -19,6 +19,7 @@ import (
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/emailtypes"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/dustin/go-humanize"
@@ -103,12 +104,6 @@ func (m *Module) CreateBulkInvite(ctx context.Context, orgID valuer.UUID, userID
return nil, err
}

if existingUser != nil {
if err := existingUser.ErrIfRoot(); err != nil {
return nil, errors.WithAdditionalf(err, "cannot send invite to root user")
}
}

if existingUser != nil {
return nil, errors.New(errors.TypeAlreadyExists, errors.CodeAlreadyExists, "User already exists with the same email")
}
@@ -173,7 +168,7 @@ func (m *Module) DeleteInvite(ctx context.Context, orgID string, id valuer.UUID)
func (module *Module) CreateUser(ctx context.Context, input *types.User, opts ...root.CreateUserOption) error {
createUserOpts := root.NewCreateUserOptions(opts...)

// since assign is idempotant multiple calls to assign won't cause issues in case of retries.
// since assign is idempotent multiple calls to assign won't cause issues in case of retries.
err := module.authz.Grant(ctx, input.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role), authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
if err != nil {
return err
@@ -208,21 +203,27 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
return nil, err
}

if err := existingUser.ErrIfRoot(); err != nil {
return nil, errors.WithAdditionalf(err, "cannot update root user")
}

requestor, err := m.store.GetUser(ctx, valuer.MustNewUUID(updatedBy))
if err != nil {
return nil, err
}

if user.Role != "" && user.Role != existingUser.Role && requestor.Role != types.RoleAdmin {
// only displayName, role can be updated
if user.DisplayName == "" {
user.DisplayName = existingUser.DisplayName
}

if user.Role == "" {
user.Role = existingUser.Role
}

if user.Role != existingUser.Role && requestor.Role != types.RoleAdmin {
return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, "only admins can change roles")
}

// Make sure that the request is not demoting the last admin user.
if user.Role != "" && user.Role != existingUser.Role && existingUser.Role == types.RoleAdmin {
// Make sure that th e request is not demoting the last admin user.
// also an admin user can only change role of their own or other user
if user.Role != existingUser.Role && existingUser.Role == types.RoleAdmin {
adminUsers, err := m.store.GetUsersByRoleAndOrgID(ctx, types.RoleAdmin, orgID)
if err != nil {
return nil, err
@@ -233,7 +234,7 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
}
}

if user.Role != "" && user.Role != existingUser.Role {
if user.Role != existingUser.Role {
err = m.authz.ModifyGrant(ctx,
orgID,
roletypes.MustGetSigNozManagedRoleFromExistingRole(existingUser.Role),
@@ -245,28 +246,23 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
}
}

existingUser.Update(user.DisplayName, user.Role)
if err := m.UpdateAnyUser(ctx, orgID, existingUser); err != nil {
user.UpdatedAt = time.Now()
updatedUser, err := m.store.UpdateUser(ctx, orgID, id, user)
if err != nil {
return nil, err
}

return existingUser, nil
}
traits := types.NewTraitsFromUser(updatedUser)
m.analytics.IdentifyUser(ctx, user.OrgID.String(), user.ID.String(), traits)

func (module *Module) UpdateAnyUser(ctx context.Context, orgID valuer.UUID, user *types.User) error {
if err := module.store.UpdateUser(ctx, orgID, user); err != nil {
return err
traits["updated_by"] = updatedBy
m.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Updated", traits)

if err := m.tokenizer.DeleteIdentity(ctx, valuer.MustNewUUID(id)); err != nil {
return nil, err
}

traits := types.NewTraitsFromUser(user)
module.analytics.IdentifyUser(ctx, user.OrgID.String(), user.ID.String(), traits)
module.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Updated", traits)

if err := module.tokenizer.DeleteIdentity(ctx, user.ID); err != nil {
return err
}

return nil
return updatedUser, nil
}

func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id string, deletedBy string) error {
@@ -275,11 +271,7 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
return err
}

if err := user.ErrIfRoot(); err != nil {
return errors.WithAdditionalf(err, "cannot delete root user")
}

if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(user.Email.String())) {
if slices.Contains(integrationstypes.IntegrationUserEmails, user.Email) {
return errors.New(errors.TypeForbidden, errors.CodeForbidden, "integration user cannot be deleted")
}

@@ -293,7 +285,7 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
return errors.New(errors.TypeForbidden, errors.CodeForbidden, "cannot delete the last admin")
}

// since revoke is idempotant multiple calls to revoke won't cause issues in case of retries
// since revoke is idempotent multiple calls to revoke won't cause issues in case of retries
err = module.authz.Revoke(ctx, orgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role), authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
if err != nil {
return err
@@ -373,10 +365,6 @@ func (module *Module) ForgotPassword(ctx context.Context, orgID valuer.UUID, ema
return err
}

if err := user.ErrIfRoot(); err != nil {
return errors.WithAdditionalf(err, "cannot reset password for root user")
}

token, err := module.GetOrCreateResetPasswordToken(ctx, user.ID)
if err != nil {
module.settings.Logger().ErrorContext(ctx, "failed to create reset password token", "error", err)
@@ -420,15 +408,6 @@ func (module *Module) UpdatePasswordByResetPasswordToken(ctx context.Context, to
return err
}

user, err := module.store.GetUser(ctx, valuer.MustNewUUID(password.UserID))
if err != nil {
return err
}

if err := user.ErrIfRoot(); err != nil {
return errors.WithAdditionalf(err, "cannot reset password for root user")
}

if err := password.Update(passwd); err != nil {
return err
}
@@ -437,15 +416,6 @@ func (module *Module) UpdatePasswordByResetPasswordToken(ctx context.Context, to
}

func (module *Module) UpdatePassword(ctx context.Context, userID valuer.UUID, oldpasswd string, passwd string) error {
user, err := module.store.GetUser(ctx, userID)
if err != nil {
return err
}

if err := user.ErrIfRoot(); err != nil {
return errors.WithAdditionalf(err, "cannot change password for root user")
}

password, err := module.store.GetPasswordByUserID(ctx, userID)
if err != nil {
return err
@@ -507,7 +477,7 @@ func (m *Module) RevokeAPIKey(ctx context.Context, id, removedByUserID valuer.UU
}

func (module *Module) CreateFirstUser(ctx context.Context, organization *types.Organization, name string, email valuer.Email, passwd string) (*types.User, error) {
user, err := types.NewRootUser(name, email, organization.ID)
user, err := types.NewUser(name, email, types.RoleAdmin, organization.ID)
if err != nil {
return nil, err
}
@@ -1,187 +0,0 @@
package impluser

import (
"context"
"time"

"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)

type service struct {
settings factory.ScopedProviderSettings
store types.UserStore
module user.Module
orgGetter organization.Getter
authz authz.AuthZ
config user.RootConfig
stopC chan struct{}
}

func NewService(
providerSettings factory.ProviderSettings,
store types.UserStore,
module user.Module,
orgGetter organization.Getter,
authz authz.AuthZ,
config user.RootConfig,
) user.Service {
return &service{
settings: factory.NewScopedProviderSettings(providerSettings, "go.signoz.io/pkg/modules/user"),
store: store,
module: module,
orgGetter: orgGetter,
authz: authz,
config: config,
stopC: make(chan struct{}),
}
}

func (s *service) Start(ctx context.Context) error {
if !s.config.Enabled {
<-s.stopC
return nil
}

ticker := time.NewTicker(10 * time.Second)
defer ticker.Stop()

for {
err := s.reconcile(ctx)
if err == nil {
s.settings.Logger().InfoContext(ctx, "root user reconciliation completed successfully")
<-s.stopC
return nil
}

s.settings.Logger().WarnContext(ctx, "root user reconciliation failed, retrying", "error", err)

select {
case <-s.stopC:
return nil
case <-ticker.C:
continue
}
}
}

func (s *service) Stop(ctx context.Context) error {
close(s.stopC)
return nil
}

func (s *service) reconcile(ctx context.Context) error {
org, err := s.orgGetter.GetByName(ctx, s.config.OrgName)
if err != nil {
if errors.Ast(err, errors.TypeNotFound) {
newOrg := types.NewOrganizationWithName(s.config.OrgName)
_, err := s.module.CreateFirstUser(ctx, newOrg, s.config.Email.String(), s.config.Email, s.config.Password)
return err
}

return err
}

return s.reconcileRootUser(ctx, org.ID)
}

func (s *service) reconcileRootUser(ctx context.Context, orgID valuer.UUID) error {
existingRoot, err := s.store.GetRootUserByOrgID(ctx, orgID)
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
return err
}

if existingRoot == nil {
return s.createOrPromoteRootUser(ctx, orgID)
}

return s.updateExistingRootUser(ctx, orgID, existingRoot)
}

func (s *service) createOrPromoteRootUser(ctx context.Context, orgID valuer.UUID) error {
existingUser, err := s.store.GetUserByEmailAndOrgID(ctx, s.config.Email, orgID)
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
return err
}

if existingUser != nil {
oldRole := existingUser.Role

existingUser.PromoteToRoot()
if err := s.module.UpdateAnyUser(ctx, orgID, existingUser); err != nil {
return err
}

if oldRole != types.RoleAdmin {
if err := s.authz.ModifyGrant(ctx,
orgID,
roletypes.MustGetSigNozManagedRoleFromExistingRole(oldRole),
roletypes.MustGetSigNozManagedRoleFromExistingRole(types.RoleAdmin),
authtypes.MustNewSubject(authtypes.TypeableUser, existingUser.ID.StringValue(), orgID, nil),
); err != nil {
return err
}
}

return s.setPassword(ctx, existingUser.ID)
}

// Create new root user
newUser, err := types.NewRootUser(s.config.Email.String(), s.config.Email, orgID)
if err != nil {
return err
}

factorPassword, err := types.NewFactorPassword(s.config.Password, newUser.ID.StringValue())
if err != nil {
return err
}

return s.module.CreateUser(ctx, newUser, user.WithFactorPassword(factorPassword))
}

func (s *service) updateExistingRootUser(ctx context.Context, orgID valuer.UUID, existingRoot *types.User) error {
existingRoot.PromoteToRoot()

if existingRoot.Email != s.config.Email {
existingRoot.UpdateEmail(s.config.Email)
if err := s.module.UpdateAnyUser(ctx, orgID, existingRoot); err != nil {
return err
}
}

return s.setPassword(ctx, existingRoot.ID)
}

func (s *service) setPassword(ctx context.Context, userID valuer.UUID) error {
password, err := s.store.GetPasswordByUserID(ctx, userID)
if err != nil {
if !errors.Ast(err, errors.TypeNotFound) {
return err
}

factorPassword, err := types.NewFactorPassword(s.config.Password, userID.StringValue())
if err != nil {
return err
}

return s.store.CreatePassword(ctx, factorPassword)
}

if !password.Equals(s.config.Password) {
if err := password.Update(s.config.Password); err != nil {
return err
}

return s.store.UpdatePassword(ctx, password)
}

return nil
}
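The deleted service above ran a reconcile loop: attempt reconciliation, and on failure retry on a ticker until the service is stopped. A minimal sketch of that start/stop pattern with only the standard library follows; the struct and the reconcile callback are placeholders, not the removed SigNoz implementation.

```go
package rootreconciler

import (
	"context"
	"log/slog"
	"time"
)

type Service struct {
	reconcile func(context.Context) error
	logger    *slog.Logger
	stopC     chan struct{}
}

// Start retries reconcile on a fixed interval until it succeeds or Stop is
// called, mirroring the loop in the deleted impluser service.
func (s *Service) Start(ctx context.Context) error {
	ticker := time.NewTicker(10 * time.Second)
	defer ticker.Stop()

	for {
		err := s.reconcile(ctx)
		if err == nil {
			s.logger.InfoContext(ctx, "reconciliation completed successfully")
			<-s.stopC // block until Stop so the service keeps running
			return nil
		}
		s.logger.WarnContext(ctx, "reconciliation failed, retrying", "error", err)

		select {
		case <-s.stopC:
			return nil
		case <-ticker.C:
		}
	}
}

// Stop unblocks Start and ends the retry loop.
func (s *Service) Stop(ctx context.Context) error {
	close(s.stopC)
	return nil
}
```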
@@ -210,24 +210,20 @@ func (store *store) GetUsersByRoleAndOrgID(ctx context.Context, role types.Role,
return users, nil
}

func (store *store) UpdateUser(ctx context.Context, orgID valuer.UUID, user *types.User) error {
_, err := store.
sqlstore.
BunDBCtx(ctx).
NewUpdate().
func (store *store) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, user *types.User) (*types.User, error) {
user.UpdatedAt = time.Now()
_, err := store.sqlstore.BunDB().NewUpdate().
Model(user).
Column("display_name").
Column("email").
Column("role").
Column("is_root").
Column("updated_at").
Where("id = ?", id).
Where("org_id = ?", orgID).
Where("id = ?", user.ID).
Exec(ctx)
if err != nil {
return store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "user does not exist in org: %s", orgID)
return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "user with id: %s does not exist in org: %s", id, orgID)
}
return nil
return user, nil
}

func (store *store) ListUsersByOrgID(ctx context.Context, orgID valuer.UUID) ([]*types.GettableUser, error) {
@@ -606,22 +602,6 @@ func (store *store) RunInTx(ctx context.Context, cb func(ctx context.Context) er
})
}

func (store *store) GetRootUserByOrgID(ctx context.Context, orgID valuer.UUID) (*types.User, error) {
user := new(types.User)
err := store.
sqlstore.
BunDBCtx(ctx).
NewSelect().
Model(user).
Where("org_id = ?", orgID).
Where("is_root = ?", true).
Scan(ctx)
if err != nil {
return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "root user for org %s not found", orgID)
}
return user, nil
}

func (store *store) ListUsersByEmailAndOrgIDs(ctx context.Context, email valuer.Email, orgIDs []valuer.UUID) ([]*types.User, error) {
users := []*types.User{}
err := store.
@@ -1,7 +0,0 @@
package user

import "github.com/SigNoz/signoz/pkg/factory"

type Service interface {
factory.Service
}
@@ -34,9 +34,6 @@ type Module interface {
ForgotPassword(ctx context.Context, orgID valuer.UUID, email valuer.Email, frontendBaseURL string) error

UpdateUser(ctx context.Context, orgID valuer.UUID, id string, user *types.User, updatedBy string) (*types.User, error)

// UpdateAnyUser updates a user and persists the changes to the database along with the analytics and identity deletion.
UpdateAnyUser(ctx context.Context, orgID valuer.UUID, user *types.User) error
DeleteUser(ctx context.Context, orgID valuer.UUID, id string, deletedBy string) error

// invite
@@ -57,9 +54,6 @@ type Module interface {
}

type Getter interface {
// Get root user by org id.
GetRootUserByOrgID(context.Context, valuer.UUID) (*types.User, error)

// Get gets the users based on the given id
ListByOrgID(context.Context, valuer.UUID) ([]*types.User, error)

File diff suppressed because it is too large
@@ -0,0 +1,814 @@
package implawsprovider

import (
"context"
"fmt"
"log/slog"
"net/url"
"slices"
"sort"
"sync"
"time"

"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
integrationstore "github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/store"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"golang.org/x/exp/maps"
)

var (
CodeInvalidAWSRegion = errors.MustNewCode("invalid_aws_region")
CodeDashboardNotFound = errors.MustNewCode("dashboard_not_found")
)

type awsProvider struct {
logger *slog.Logger
querier querier.Querier
accountsRepo integrationstore.CloudProviderAccountsRepository
serviceConfigRepo integrationstore.ServiceConfigDatabase
awsServiceDefinitions *services.AWSServicesProvider
}

func NewAWSCloudProvider(
logger *slog.Logger,
accountsRepo integrationstore.CloudProviderAccountsRepository,
serviceConfigRepo integrationstore.ServiceConfigDatabase,
querier querier.Querier,
) integrationstypes.CloudProvider {
awsServiceDefinitions, err := services.NewAWSCloudProviderServices()
if err != nil {
panic("failed to initialize AWS service definitions: " + err.Error())
}

return &awsProvider{
logger: logger,
querier: querier,
accountsRepo: accountsRepo,
serviceConfigRepo: serviceConfigRepo,
awsServiceDefinitions: awsServiceDefinitions,
}
}

func (a *awsProvider) GetAccountStatus(ctx context.Context, orgID, accountID string) (*integrationstypes.GettableAccountStatus, error) {
accountRecord, err := a.accountsRepo.Get(ctx, orgID, a.GetName().String(), accountID)
if err != nil {
return nil, err
}

return &integrationstypes.GettableAccountStatus{
Id: accountRecord.ID.String(),
CloudAccountId: accountRecord.AccountID,
Status: accountRecord.Status(),
}, nil
}

func (a *awsProvider) ListConnectedAccounts(ctx context.Context, orgID string) (*integrationstypes.GettableConnectedAccountsList, error) {
accountRecords, err := a.accountsRepo.ListConnected(ctx, orgID, a.GetName().String())
if err != nil {
return nil, err
}

connectedAccounts := make([]*integrationstypes.Account, 0, len(accountRecords))
for _, r := range accountRecords {
connectedAccounts = append(connectedAccounts, r.Account(a.GetName()))
}

return &integrationstypes.GettableConnectedAccountsList{
Accounts: connectedAccounts,
}, nil
}

func (a *awsProvider) AgentCheckIn(ctx context.Context, req *integrationstypes.PostableAgentCheckInPayload) (any, error) {
// agent can't check in unless the account is already created
existingAccount, err := a.accountsRepo.Get(ctx, req.OrgID, a.GetName().String(), req.ID)
if err != nil {
return nil, err
}

if existingAccount != nil && existingAccount.AccountID != nil && *existingAccount.AccountID != req.AccountID {
return nil, model.BadRequest(fmt.Errorf(
"can't check in with new %s account id %s for account %s with existing %s id %s",
a.GetName().String(), req.AccountID, existingAccount.ID.StringValue(), a.GetName().String(),
*existingAccount.AccountID,
))
}

existingAccount, err = a.accountsRepo.GetConnectedCloudAccount(ctx, req.OrgID, a.GetName().String(), req.AccountID)
if existingAccount != nil && existingAccount.ID.StringValue() != req.ID {
return nil, model.BadRequest(fmt.Errorf(
"can't check in to %s account %s with id %s. already connected with id %s",
a.GetName().String(), req.AccountID, req.ID, existingAccount.ID.StringValue(),
))
}

agentReport := integrationstypes.AgentReport{
TimestampMillis: time.Now().UnixMilli(),
Data: req.Data,
}

account, err := a.accountsRepo.Upsert(
ctx, req.OrgID, a.GetName().String(), &req.ID, nil, &req.AccountID, &agentReport, nil,
)
if err != nil {
return nil, err
}

agentConfig, err := a.getAWSAgentConfig(ctx, account)
if err != nil {
return nil, err
}

return &integrationstypes.GettableAWSAgentCheckIn{
AccountId: account.ID.StringValue(),
CloudAccountId: *account.AccountID,
RemovedAt: account.RemovedAt,
IntegrationConfig: *agentConfig,
}, nil
}

func (a *awsProvider) getAWSAgentConfig(ctx context.Context, account *integrationstypes.CloudIntegration) (*integrationstypes.AWSAgentIntegrationConfig, error) {
// prepare and return integration config to be consumed by agent
agentConfig := &integrationstypes.AWSAgentIntegrationConfig{
EnabledRegions: []string{},
TelemetryCollectionStrategy: &integrationstypes.AWSCollectionStrategy{
Provider: a.GetName(),
AWSMetrics: &integrationstypes.AWSMetricsStrategy{},
AWSLogs: &integrationstypes.AWSLogsStrategy{},
S3Buckets: map[string][]string{},
},
}

accountConfig := new(integrationstypes.AWSAccountConfig)
err := accountConfig.Unmarshal(account.Config)
if err != nil {
return nil, err
}

if accountConfig != nil && accountConfig.EnabledRegions != nil {
agentConfig.EnabledRegions = accountConfig.EnabledRegions
}

svcConfigs, err := a.serviceConfigRepo.GetAllForAccount(
ctx, account.OrgID, account.ID.StringValue(),
)
if err != nil {
return nil, err
}

// accumulate config in a fixed order to ensure same config generated across runs
configuredServices := maps.Keys(svcConfigs)
slices.Sort(configuredServices)

for _, svcType := range configuredServices {
definition, err := a.awsServiceDefinitions.GetServiceDefinition(ctx, svcType)
if err != nil {
continue
}
config := svcConfigs[svcType]

serviceConfig := new(integrationstypes.AWSCloudServiceConfig)
err = serviceConfig.Unmarshal(config)
if err != nil {
continue
}

if serviceConfig.Logs != nil && serviceConfig.Logs.Enabled {
if svcType == integrationstypes.S3Sync {
// S3 bucket sync; No cloudwatch logs are appended for this service type;
// Though definition is populated with a custom cloudwatch group that helps in calculating logs connection status
agentConfig.TelemetryCollectionStrategy.S3Buckets = serviceConfig.Logs.S3Buckets
} else if definition.Strategy.AWSLogs != nil { // services that includes a logs subscription
agentConfig.TelemetryCollectionStrategy.AWSLogs.Subscriptions = append(
agentConfig.TelemetryCollectionStrategy.AWSLogs.Subscriptions,
definition.Strategy.AWSLogs.Subscriptions...,
)
}
}

if serviceConfig.Metrics != nil && serviceConfig.Metrics.Enabled && definition.Strategy.AWSMetrics != nil {
agentConfig.TelemetryCollectionStrategy.AWSMetrics.StreamFilters = append(
agentConfig.TelemetryCollectionStrategy.AWSMetrics.StreamFilters,
definition.Strategy.AWSMetrics.StreamFilters...,
)
}
}

return agentConfig, nil
}
func (a *awsProvider) GetName() integrationstypes.CloudProviderType {
|
||||
return integrationstypes.CloudProviderAWS
|
||||
}
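
// Illustration only, not part of the change set: with assumed values (two
// enabled regions and an S3 sync service shipping one bucket), the agent
// config assembled by getAWSAgentConfig above has roughly this shape. Stream
// filters and log subscriptions are copied from the individual service
// definitions at runtime and are left empty here.
var exampleAWSAgentConfig = &integrationstypes.AWSAgentIntegrationConfig{
	EnabledRegions: []string{"us-east-1", "eu-west-1"},
	TelemetryCollectionStrategy: &integrationstypes.AWSCollectionStrategy{
		Provider:   integrationstypes.CloudProviderAWS,
		AWSMetrics: &integrationstypes.AWSMetricsStrategy{},
		AWSLogs:    &integrationstypes.AWSLogsStrategy{},
		S3Buckets:  map[string][]string{"us-east-1": {"example-logs-bucket"}},
	},
}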

func (a *awsProvider) ListServices(ctx context.Context, orgID string, cloudAccountID *string) (any, error) {
	svcConfigs := make(map[string]*integrationstypes.AWSCloudServiceConfig)
	if cloudAccountID != nil {
		activeAccount, err := a.accountsRepo.GetConnectedCloudAccount(ctx, orgID, a.GetName().String(), *cloudAccountID)
		if err != nil {
			return nil, err
		}

		serviceConfigs, err := a.serviceConfigRepo.GetAllForAccount(ctx, orgID, activeAccount.ID.String())
		if err != nil {
			return nil, err
		}

		for svcType, config := range serviceConfigs {
			serviceConfig := new(integrationstypes.AWSCloudServiceConfig)
			err = serviceConfig.Unmarshal(config)
			if err != nil {
				return nil, err
			}
			svcConfigs[svcType] = serviceConfig
		}
	}

	summaries := make([]integrationstypes.AWSServiceSummary, 0)

	definitions, err := a.awsServiceDefinitions.ListServiceDefinitions(ctx)
	if err != nil {
		return nil, model.InternalError(fmt.Errorf("couldn't list aws service definitions: %w", err))
	}

	for _, def := range definitions {
		summary := integrationstypes.AWSServiceSummary{
			DefinitionMetadata: def.DefinitionMetadata,
			Config:             nil,
		}

		summary.Config = svcConfigs[summary.Id]

		summaries = append(summaries, summary)
	}

	sort.Slice(summaries, func(i, j int) bool {
		return summaries[i].DefinitionMetadata.Title < summaries[j].DefinitionMetadata.Title
	})

	return &integrationstypes.GettableAWSServices{
		Services: summaries,
	}, nil
}

func (a *awsProvider) GetServiceDetails(ctx context.Context, req *integrationstypes.GetServiceDetailsReq) (any, error) {
	details := new(integrationstypes.GettableAWSServiceDetails)

	awsDefinition, err := a.awsServiceDefinitions.GetServiceDefinition(ctx, req.ServiceId)
	if err != nil {
		return nil, model.InternalError(fmt.Errorf("couldn't get aws service definition: %w", err))
	}

	details.AWSServiceDefinition = *awsDefinition
	details.Strategy.Provider = a.GetName()
	if req.CloudAccountID == nil {
		return details, nil
	}

	config, err := a.getServiceConfig(ctx, &details.AWSServiceDefinition, req.OrgID, a.GetName().String(), req.ServiceId, *req.CloudAccountID)
	if err != nil {
		return nil, err
	}

	if config == nil {
		return details, nil
	}

	details.Config = config

	connectionStatus, err := a.getServiceConnectionStatus(
		ctx,
		*req.CloudAccountID,
		req.OrgID,
		&details.AWSServiceDefinition,
		config,
	)
	if err != nil {
		return nil, err
	}

	details.ConnectionStatus = connectionStatus

	return details, nil
}

func (a *awsProvider) getServiceConnectionStatus(
	ctx context.Context,
	cloudAccountID string,
	orgID string,
	def *integrationstypes.AWSServiceDefinition,
	serviceConfig *integrationstypes.AWSCloudServiceConfig,
) (*integrationstypes.ServiceConnectionStatus, error) {
	if def.Strategy == nil {
		return nil, nil
	}

	resp := new(integrationstypes.ServiceConnectionStatus)

	wg := sync.WaitGroup{}

	if def.Strategy.AWSMetrics != nil && serviceConfig.Metrics != nil && serviceConfig.Metrics.Enabled {
		wg.Add(1)
		go func() {
			defer func() {
				if r := recover(); r != nil {
					a.logger.ErrorContext(
						ctx, "panic while getting service metrics connection status",
						"error", r,
						"service", def.DefinitionMetadata.Id,
					)
				}
			}()
			defer wg.Done()
			status, _ := a.getServiceMetricsConnectionStatus(ctx, cloudAccountID, orgID, def)
			resp.Metrics = status
		}()
	}

	if def.Strategy.AWSLogs != nil && serviceConfig.Logs != nil && serviceConfig.Logs.Enabled {
		wg.Add(1)
		go func() {
			defer func() {
				if r := recover(); r != nil {
					a.logger.ErrorContext(
						ctx, "panic while getting service logs connection status",
						"error", r,
						"service", def.DefinitionMetadata.Id,
					)
				}
			}()
			defer wg.Done()
			status, _ := a.getServiceLogsConnectionStatus(ctx, cloudAccountID, orgID, def)
			resp.Logs = status
		}()
	}

	wg.Wait()

	return resp, nil
}

func (a *awsProvider) getServiceMetricsConnectionStatus(
	ctx context.Context,
	cloudAccountID string,
	orgID string,
	def *integrationstypes.AWSServiceDefinition,
) ([]*integrationstypes.SignalConnectionStatus, error) {
	if def.Strategy == nil ||
		len(def.Strategy.AWSMetrics.StreamFilters) < 1 ||
		len(def.DataCollected.Metrics) < 1 {
		return nil, nil
	}

	statusResp := make([]*integrationstypes.SignalConnectionStatus, 0)

	for _, metric := range def.IngestionStatusCheck.Metrics {
		statusResp = append(statusResp, &integrationstypes.SignalConnectionStatus{
			CategoryID:          metric.Category,
			CategoryDisplayName: metric.DisplayName,
		})
	}

	for index, category := range def.IngestionStatusCheck.Metrics {
		queries := make([]qbtypes.QueryEnvelope, 0)

		for _, check := range category.Checks {
			filterExpression := fmt.Sprintf(`cloud.provider="aws" AND cloud.account.id="%s"`, cloudAccountID)
			f := ""
			for _, attribute := range check.Attributes {
				f = fmt.Sprintf("%s %s", attribute.Name, attribute.Operator)
				if attribute.Value != "" {
					f = fmt.Sprintf("%s '%s'", f, attribute.Value)
				}

				filterExpression = fmt.Sprintf("%s AND %s", filterExpression, f)
			}

			queries = append(queries, qbtypes.QueryEnvelope{
				Type: qbtypes.QueryTypeBuilder,
				Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
					Name:   valuer.GenerateUUID().String(),
					Signal: telemetrytypes.SignalMetrics,
					Aggregations: []qbtypes.MetricAggregation{{
						MetricName:       check.Key,
						TimeAggregation:  metrictypes.TimeAggregationAvg,
						SpaceAggregation: metrictypes.SpaceAggregationAvg,
					}},
					Filter: &qbtypes.Filter{
						Expression: filterExpression,
					},
				},
			})
		}

		resp, err := a.querier.QueryRange(ctx, valuer.MustNewUUID(orgID), &qbtypes.QueryRangeRequest{
			SchemaVersion: "v5",
			Start:         uint64(time.Now().Add(-time.Hour).UnixMilli()),
			End:           uint64(time.Now().UnixMilli()),
			RequestType:   qbtypes.RequestTypeScalar,
			CompositeQuery: qbtypes.CompositeQuery{
				Queries: queries,
			},
		})
		if err != nil {
			a.logger.DebugContext(ctx,
				"error querying for service metrics connection status",
				"error", err,
				"service", def.DefinitionMetadata.Id,
			)
			continue
		}

		// skip when the querier returned nothing for this category
		if resp == nil || len(resp.Data.Results) < 1 {
			continue
		}

		queryResponse, ok := resp.Data.Results[0].(*qbtypes.TimeSeriesData)
		if !ok {
			continue
		}

		if queryResponse == nil ||
			len(queryResponse.Aggregations) < 1 ||
			len(queryResponse.Aggregations[0].Series) < 1 ||
			len(queryResponse.Aggregations[0].Series[0].Values) < 1 {
			continue
		}

		statusResp[index] = &integrationstypes.SignalConnectionStatus{
			CategoryID:           category.Category,
			CategoryDisplayName:  category.DisplayName,
			LastReceivedTsMillis: queryResponse.Aggregations[0].Series[0].Values[0].Timestamp,
			LastReceivedFrom:     "signoz-aws-integration",
		}
	}

	return statusResp, nil
}
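
// Illustration only (assumed values): for cloud account id "123456789012" and
// a check whose single attribute is {Name: "ApiName", Operator: "EXISTS"},
// the loop above composes a filter expression of this shape before it is
// handed to the querier.
const exampleMetricsStatusFilterExpression = `cloud.provider="aws" AND cloud.account.id="123456789012" AND ApiName EXISTS`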

func (a *awsProvider) getServiceLogsConnectionStatus(
	ctx context.Context,
	cloudAccountID string,
	orgID string,
	def *integrationstypes.AWSServiceDefinition,
) ([]*integrationstypes.SignalConnectionStatus, error) {
	if def.Strategy == nil ||
		len(def.Strategy.AWSLogs.Subscriptions) < 1 ||
		len(def.DataCollected.Logs) < 1 {
		return nil, nil
	}

	statusResp := make([]*integrationstypes.SignalConnectionStatus, 0)

	for _, log := range def.IngestionStatusCheck.Logs {
		statusResp = append(statusResp, &integrationstypes.SignalConnectionStatus{
			CategoryID:          log.Category,
			CategoryDisplayName: log.DisplayName,
		})
	}

	for index, category := range def.IngestionStatusCheck.Logs {
		queries := make([]qbtypes.QueryEnvelope, 0)

		for _, check := range category.Checks {
			filterExpression := fmt.Sprintf(`cloud.account.id="%s"`, cloudAccountID)
			f := ""
			for _, attribute := range check.Attributes {
				f = fmt.Sprintf("%s %s", attribute.Name, attribute.Operator)
				if attribute.Value != "" {
					f = fmt.Sprintf("%s '%s'", f, attribute.Value)
				}

				filterExpression = fmt.Sprintf("%s AND %s", filterExpression, f)
			}

			queries = append(queries, qbtypes.QueryEnvelope{
				Type: qbtypes.QueryTypeBuilder,
				Spec: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
					Name:   valuer.GenerateUUID().String(),
					Signal: telemetrytypes.SignalLogs,
					Aggregations: []qbtypes.LogAggregation{{
						Expression: "count()",
					}},
					Filter: &qbtypes.Filter{
						Expression: filterExpression,
					},
					Limit:  10,
					Offset: 0,
				},
			})
		}

		resp, err := a.querier.QueryRange(ctx, valuer.MustNewUUID(orgID), &qbtypes.QueryRangeRequest{
			SchemaVersion: "v1",
			Start:         uint64(time.Now().Add(-time.Hour * 1).UnixMilli()),
			End:           uint64(time.Now().UnixMilli()),
			RequestType:   qbtypes.RequestTypeTimeSeries,
			CompositeQuery: qbtypes.CompositeQuery{
				Queries: queries,
			},
		})
		if err != nil {
			a.logger.DebugContext(ctx,
				"error querying for service logs connection status",
				"error", err,
				"service", def.DefinitionMetadata.Id,
			)
			continue
		}

		// skip when the querier returned nothing for this category
		if resp == nil || len(resp.Data.Results) < 1 {
			continue
		}

		queryResponse, ok := resp.Data.Results[0].(*qbtypes.TimeSeriesData)
		if !ok {
			continue
		}

		if queryResponse == nil ||
			len(queryResponse.Aggregations) < 1 ||
			len(queryResponse.Aggregations[0].Series) < 1 ||
			len(queryResponse.Aggregations[0].Series[0].Values) < 1 {
			continue
		}

		statusResp[index] = &integrationstypes.SignalConnectionStatus{
			CategoryID:           category.Category,
			CategoryDisplayName:  category.DisplayName,
			LastReceivedTsMillis: queryResponse.Aggregations[0].Series[0].Values[0].Timestamp,
			LastReceivedFrom:     "signoz-aws-integration",
		}
	}

	return statusResp, nil
}

func (a *awsProvider) getServiceConfig(ctx context.Context,
	def *integrationstypes.AWSServiceDefinition, orgID, cloudProvider, serviceId, cloudAccountId string,
) (*integrationstypes.AWSCloudServiceConfig, error) {
	activeAccount, err := a.accountsRepo.GetConnectedCloudAccount(ctx, orgID, cloudProvider, cloudAccountId)
	if err != nil {
		return nil, err
	}

	config, err := a.serviceConfigRepo.Get(ctx, orgID, activeAccount.ID.StringValue(), serviceId)
	if err != nil {
		if errors.Ast(err, errors.TypeNotFound) {
			return nil, nil
		}

		return nil, err
	}

	serviceConfig := new(integrationstypes.AWSCloudServiceConfig)
	err = serviceConfig.Unmarshal(config)
	if err != nil {
		return nil, err
	}

	if config != nil && serviceConfig.Metrics != nil && serviceConfig.Metrics.Enabled {
		def.PopulateDashboardURLs(serviceId)
	}

	return serviceConfig, nil
}

func (a *awsProvider) GetAvailableDashboards(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error) {
	accountRecords, err := a.accountsRepo.ListConnected(ctx, orgID.StringValue(), a.GetName().String())
	if err != nil {
		return nil, err
	}

	// for now service dashboards are only available when metrics are enabled.
	servicesWithAvailableMetrics := map[string]*time.Time{}

	for _, ar := range accountRecords {
		if ar.AccountID != nil {
			configsBySvcId, err := a.serviceConfigRepo.GetAllForAccount(ctx, orgID.StringValue(), ar.ID.StringValue())
			if err != nil {
				return nil, err
			}

			for svcId, config := range configsBySvcId {
				serviceConfig := new(integrationstypes.AWSCloudServiceConfig)
				err = serviceConfig.Unmarshal(config)
				if err != nil {
					return nil, err
				}

				if serviceConfig.Metrics != nil && serviceConfig.Metrics.Enabled {
					servicesWithAvailableMetrics[svcId] = &ar.CreatedAt
				}
			}
		}
	}

	svcDashboards := make([]*dashboardtypes.Dashboard, 0)

	allServices, err := a.awsServiceDefinitions.ListServiceDefinitions(ctx)
	if err != nil {
		return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to list aws service definitions")
	}

	for _, svc := range allServices {
		serviceDashboardsCreatedAt, ok := servicesWithAvailableMetrics[svc.Id]
		if ok {
			svcDashboards = append(
				svcDashboards,
				integrationstypes.GetDashboardsFromAssets(svc.Id, a.GetName(), serviceDashboardsCreatedAt, svc.Assets)...,
			)
			servicesWithAvailableMetrics[svc.Id] = nil
		}
	}

	return svcDashboards, nil
}

func (a *awsProvider) GetDashboard(ctx context.Context, req *integrationstypes.GettableDashboard) (*dashboardtypes.Dashboard, error) {
	allDashboards, err := a.GetAvailableDashboards(ctx, req.OrgID)
	if err != nil {
		return nil, err
	}

	for _, d := range allDashboards {
		if d.ID == req.ID {
			return d, nil
		}
	}

	return nil, errors.NewNotFoundf(CodeDashboardNotFound, "dashboard with id %s not found", req.ID)
}

func (a *awsProvider) GenerateConnectionArtifact(ctx context.Context, req *integrationstypes.PostableConnectionArtifact) (any, error) {
	connection := new(integrationstypes.PostableAWSConnectionUrl)

	err := connection.Unmarshal(req.Data)
	if err != nil {
		return nil, err
	}

	if connection.AccountConfig != nil {
		for _, region := range connection.AccountConfig.EnabledRegions {
			if integrationstypes.ValidAWSRegions[region] {
				continue
			}

			return nil, errors.NewInvalidInputf(CodeInvalidAWSRegion, "invalid aws region: %s", region)
		}
	}

	config, err := connection.AccountConfig.Marshal()
	if err != nil {
		return nil, err
	}

	account, err := a.accountsRepo.Upsert(
		ctx, req.OrgID, integrationstypes.CloudProviderAWS.String(), nil, config,
		nil, nil, nil,
	)
	if err != nil {
		return nil, err
	}

	agentVersion := "v0.0.8"
	if connection.AgentConfig.Version != "" {
		agentVersion = connection.AgentConfig.Version
	}

	baseURL := fmt.Sprintf("https://%s.console.aws.amazon.com/cloudformation/home",
		connection.AgentConfig.Region)
	u, _ := url.Parse(baseURL)

	q := u.Query()
	q.Set("region", connection.AgentConfig.Region)
	u.Fragment = "/stacks/quickcreate"

	u.RawQuery = q.Encode()

	q = u.Query()
	q.Set("stackName", "signoz-integration")
	q.Set("templateURL", fmt.Sprintf("https://signoz-integrations.s3.us-east-1.amazonaws.com/aws-quickcreate-template-%s.json", agentVersion))
	q.Set("param_SigNozIntegrationAgentVersion", agentVersion)
	q.Set("param_SigNozApiUrl", connection.AgentConfig.SigNozAPIUrl)
	q.Set("param_SigNozApiKey", connection.AgentConfig.SigNozAPIKey)
	q.Set("param_SigNozAccountId", account.ID.StringValue())
	q.Set("param_IngestionUrl", connection.AgentConfig.IngestionUrl)
	q.Set("param_IngestionKey", connection.AgentConfig.IngestionKey)

	return &integrationstypes.GettableAWSConnectionUrl{
		AccountId:     account.ID.StringValue(),
		ConnectionUrl: u.String() + "?&" + q.Encode(), // this format is required by AWS
	}, nil
}
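
// Hedged usage sketch, not part of the change set: how a caller holding an
// integrationstypes.CloudProvider might request the CloudFormation
// quick-create link. It assumes GenerateConnectionArtifact is part of the
// CloudProvider interface (both providers in this change implement the same
// signature); everything else here is illustrative.
func exampleGenerateAWSConnectionURL(ctx context.Context, p integrationstypes.CloudProvider, orgID string, data []byte) (string, error) {
	resp, err := p.GenerateConnectionArtifact(ctx, &integrationstypes.PostableConnectionArtifact{
		OrgID: orgID,
		Data:  data, // marshalled PostableAWSConnectionUrl payload
	})
	if err != nil {
		return "", err
	}

	connectionURL, ok := resp.(*integrationstypes.GettableAWSConnectionUrl)
	if !ok {
		return "", fmt.Errorf("unexpected response type %T", resp)
	}

	return connectionURL.ConnectionUrl, nil
}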

func (a *awsProvider) UpdateServiceConfig(ctx context.Context, req *integrationstypes.PatchableServiceConfig) (any, error) {
	definition, err := a.awsServiceDefinitions.GetServiceDefinition(ctx, req.ServiceId)
	if err != nil {
		return nil, model.InternalError(fmt.Errorf("couldn't get aws service definition: %w", err))
	}

	serviceConfig := new(integrationstypes.PatchableAWSCloudServiceConfig)
	err = serviceConfig.Unmarshal(req.Config)
	if err != nil {
		return nil, err
	}

	if err = serviceConfig.Config.Validate(definition); err != nil {
		return nil, err
	}

	// can only update config for a connected cloud account id
	_, err = a.accountsRepo.GetConnectedCloudAccount(
		ctx, req.OrgID, a.GetName().String(), serviceConfig.CloudAccountId,
	)
	if err != nil {
		return nil, err
	}

	serviceConfigBytes, err := serviceConfig.Config.Marshal()
	if err != nil {
		return nil, err
	}

	updatedConfig, err := a.serviceConfigRepo.Upsert(
		ctx, req.OrgID, a.GetName().String(), serviceConfig.CloudAccountId, req.ServiceId, serviceConfigBytes,
	)
	if err != nil {
		return nil, err
	}

	if err = serviceConfig.Unmarshal(updatedConfig); err != nil {
		return nil, err
	}

	return &integrationstypes.PatchServiceConfigResponse{
		ServiceId: req.ServiceId,
		Config:    serviceConfig,
	}, nil
}

func (a *awsProvider) UpdateAccountConfig(ctx context.Context, req *integrationstypes.PatchableAccountConfig) (any, error) {
	config := new(integrationstypes.PatchableAWSAccountConfig)

	err := config.Unmarshal(req.Data)
	if err != nil {
		return nil, err
	}

	if config.Config == nil {
		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "account config can't be null")
	}

	for _, region := range config.Config.EnabledRegions {
		if integrationstypes.ValidAWSRegions[region] {
			continue
		}

		return nil, errors.NewInvalidInputf(CodeInvalidAWSRegion, "invalid aws region: %s", region)
	}

	configBytes, err := config.Config.Marshal()
	if err != nil {
		return nil, err
	}

	// account must exist to update config, but it doesn't need to be connected
	_, err = a.accountsRepo.Get(ctx, req.OrgID, a.GetName().String(), req.AccountId)
	if err != nil {
		return nil, err
	}

	accountRecord, err := a.accountsRepo.Upsert(
		ctx, req.OrgID, a.GetName().String(), &req.AccountId, configBytes, nil, nil, nil,
	)
	if err != nil {
		return nil, err
	}

	return accountRecord.Account(a.GetName()), nil
}

func (a *awsProvider) DisconnectAccount(ctx context.Context, orgID, accountID string) (*integrationstypes.CloudIntegration, error) {
	account, err := a.accountsRepo.Get(ctx, orgID, a.GetName().String(), accountID)
	if err != nil {
		return nil, err
	}

	tsNow := time.Now()
	account, err = a.accountsRepo.Upsert(
		ctx, orgID, a.GetName().String(), &accountID, nil, nil, nil, &tsNow,
	)
	if err != nil {
		return nil, err
	}

	return account, nil
}
@@ -0,0 +1,584 @@
package implazureprovider

import (
	"context"
	"fmt"
	"log/slog"
	"slices"
	"sort"
	"strings"
	"time"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/querier"
	"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
	"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/store"
	"github.com/SigNoz/signoz/pkg/query-service/model"
	"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
	"github.com/SigNoz/signoz/pkg/types/integrationstypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"golang.org/x/exp/maps"
)

var (
	CodeInvalidAzureRegion = errors.MustNewCode("invalid_azure_region")
	CodeDashboardNotFound  = errors.MustNewCode("dashboard_not_found")
)

type azureProvider struct {
	logger                  *slog.Logger
	accountsRepo            store.CloudProviderAccountsRepository
	serviceConfigRepo       store.ServiceConfigDatabase
	azureServiceDefinitions *services.AzureServicesProvider
	querier                 querier.Querier
}

func NewAzureCloudProvider(
	logger *slog.Logger,
	accountsRepo store.CloudProviderAccountsRepository,
	serviceConfigRepo store.ServiceConfigDatabase,
	querier querier.Querier,
) integrationstypes.CloudProvider {
	azureServiceDefinitions, err := services.NewAzureCloudProviderServices()
	if err != nil {
		panic("failed to initialize Azure service definitions: " + err.Error())
	}

	return &azureProvider{
		logger:                  logger,
		accountsRepo:            accountsRepo,
		serviceConfigRepo:       serviceConfigRepo,
		azureServiceDefinitions: azureServiceDefinitions,
		querier:                 querier,
	}
}

func (a *azureProvider) GetAccountStatus(ctx context.Context, orgID, accountID string) (*integrationstypes.GettableAccountStatus, error) {
	account, err := a.accountsRepo.Get(ctx, orgID, a.GetName().String(), accountID)
	if err != nil {
		return nil, err
	}

	return &integrationstypes.GettableAccountStatus{
		Id:             account.ID.String(),
		CloudAccountId: account.AccountID,
		Status:         account.Status(),
	}, nil
}

func (a *azureProvider) ListConnectedAccounts(ctx context.Context, orgID string) (*integrationstypes.GettableConnectedAccountsList, error) {
	accountRecords, err := a.accountsRepo.ListConnected(ctx, orgID, a.GetName().String())
	if err != nil {
		return nil, err
	}

	connectedAccounts := make([]*integrationstypes.Account, 0, len(accountRecords))
	for _, r := range accountRecords {
		connectedAccounts = append(connectedAccounts, r.Account(a.GetName()))
	}

	return &integrationstypes.GettableConnectedAccountsList{
		Accounts: connectedAccounts,
	}, nil
}

func (a *azureProvider) AgentCheckIn(ctx context.Context, req *integrationstypes.PostableAgentCheckInPayload) (any, error) {
	existingAccount, err := a.accountsRepo.Get(ctx, req.OrgID, a.GetName().String(), req.ID)
	if err != nil {
		return nil, err
	}

	if existingAccount != nil && existingAccount.AccountID != nil && *existingAccount.AccountID != req.AccountID {
		return nil, model.BadRequest(fmt.Errorf(
			"can't check in with new %s account id %s for account %s with existing %s id %s",
			a.GetName().String(), req.AccountID, existingAccount.ID.StringValue(), a.GetName().String(),
			*existingAccount.AccountID,
		))
	}

	existingAccount, err = a.accountsRepo.GetConnectedCloudAccount(ctx, req.OrgID, a.GetName().String(), req.AccountID)
	if existingAccount != nil && existingAccount.ID.StringValue() != req.ID {
		return nil, model.BadRequest(fmt.Errorf(
			"can't check in to %s account %s with id %s. already connected with id %s",
			a.GetName().String(), req.AccountID, req.ID, existingAccount.ID.StringValue(),
		))
	}

	agentReport := integrationstypes.AgentReport{
		TimestampMillis: time.Now().UnixMilli(),
		Data:            req.Data,
	}

	account, err := a.accountsRepo.Upsert(
		ctx, req.OrgID, a.GetName().String(), &req.ID, nil, &req.AccountID, &agentReport, nil,
	)
	if err != nil {
		return nil, err
	}

	agentConfig, err := a.getAzureAgentConfig(ctx, account)
	if err != nil {
		return nil, err
	}

	return &integrationstypes.GettableAzureAgentCheckIn{
		AccountId:         account.ID.StringValue(),
		CloudAccountId:    *account.AccountID,
		RemovedAt:         account.RemovedAt,
		IntegrationConfig: *agentConfig,
	}, nil
}

func (a *azureProvider) getAzureAgentConfig(ctx context.Context, account *integrationstypes.CloudIntegration) (*integrationstypes.AzureAgentIntegrationConfig, error) {
	// prepare and return integration config to be consumed by agent
	agentConfig := &integrationstypes.AzureAgentIntegrationConfig{
		TelemetryCollectionStrategy: make(map[string]*integrationstypes.AzureCollectionStrategy),
	}

	accountConfig := new(integrationstypes.AzureAccountConfig)
	err := accountConfig.Unmarshal(account.Config)
	if err != nil {
		return nil, err
	}

	if account.Config != nil {
		agentConfig.DeploymentRegion = accountConfig.DeploymentRegion
		agentConfig.EnabledResourceGroups = accountConfig.EnabledResourceGroups
	}

	svcConfigs, err := a.serviceConfigRepo.GetAllForAccount(
		ctx, account.OrgID, account.ID.StringValue(),
	)
	if err != nil {
		return nil, err
	}

	// accumulate config in a fixed order to ensure the same config is generated across runs
	configuredServices := maps.Keys(svcConfigs)
	slices.Sort(configuredServices)

	metrics := make([]*integrationstypes.AzureMetricsStrategy, 0)
	logs := make([]*integrationstypes.AzureLogsStrategy, 0)

	for _, svcType := range configuredServices {
		definition, err := a.azureServiceDefinitions.GetServiceDefinition(ctx, svcType)
		if err != nil {
			continue
		}
		config := svcConfigs[svcType]

		serviceConfig := new(integrationstypes.AzureCloudServiceConfig)
		err = serviceConfig.Unmarshal(config)
		if err != nil {
			continue
		}

		metricsStrategyMap := make(map[string]*integrationstypes.AzureMetricsStrategy)
		logsStrategyMap := make(map[string]*integrationstypes.AzureLogsStrategy)

		for _, metric := range definition.Strategy.AzureMetrics {
			metricsStrategyMap[metric.Name] = metric
		}

		for _, log := range definition.Strategy.AzureLogs {
			logsStrategyMap[log.Name] = log
		}

		if serviceConfig.Metrics != nil {
			for _, metric := range serviceConfig.Metrics {
				if !metric.Enabled {
					continue
				}
				// skip configured metrics that have no matching entry in the service definition
				ms, ok := metricsStrategyMap[metric.Name]
				if !ok {
					continue
				}
				metrics = append(metrics, &integrationstypes.AzureMetricsStrategy{
					CategoryType: ms.CategoryType,
					Name:         metric.Name,
				})
			}
		}

		if serviceConfig.Logs != nil {
			for _, log := range serviceConfig.Logs {
				if !log.Enabled {
					continue
				}
				// skip configured logs that have no matching entry in the service definition
				ls, ok := logsStrategyMap[log.Name]
				if !ok {
					continue
				}
				logs = append(logs, &integrationstypes.AzureLogsStrategy{
					CategoryType: ls.CategoryType,
					Name:         log.Name,
				})
			}
		}

		strategy := integrationstypes.AzureCollectionStrategy{}

		strategy.AzureMetrics = metrics
		strategy.AzureLogs = logs

		agentConfig.TelemetryCollectionStrategy[svcType] = &strategy
	}

	return agentConfig, nil
}

func (a *azureProvider) GetName() integrationstypes.CloudProviderType {
	return integrationstypes.CloudProviderAzure
}
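
// Illustration only; field values are assumed. With a single configured
// service (hypothetical id "azure_vm"), the agent config assembled by
// getAzureAgentConfig above keys one collection strategy per service id.
// Metric and log entries are copied from the matching service definition at
// runtime and are left empty here.
var exampleAzureAgentConfig = &integrationstypes.AzureAgentIntegrationConfig{
	DeploymentRegion:      "eastus",
	EnabledResourceGroups: []string{"signoz-integration-rg"},
	TelemetryCollectionStrategy: map[string]*integrationstypes.AzureCollectionStrategy{
		"azure_vm": {
			AzureMetrics: []*integrationstypes.AzureMetricsStrategy{},
			AzureLogs:    []*integrationstypes.AzureLogsStrategy{},
		},
	},
}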

func (a *azureProvider) ListServices(ctx context.Context, orgID string, cloudAccountID *string) (any, error) {
	svcConfigs := make(map[string]*integrationstypes.AzureCloudServiceConfig)
	if cloudAccountID != nil {
		activeAccount, err := a.accountsRepo.GetConnectedCloudAccount(ctx, orgID, a.GetName().String(), *cloudAccountID)
		if err != nil {
			return nil, err
		}

		serviceConfigs, err := a.serviceConfigRepo.GetAllForAccount(ctx, orgID, activeAccount.ID.StringValue())
		if err != nil {
			return nil, err
		}

		for svcType, config := range serviceConfigs {
			serviceConfig := new(integrationstypes.AzureCloudServiceConfig)
			err = serviceConfig.Unmarshal(config)
			if err != nil {
				return nil, err
			}
			svcConfigs[svcType] = serviceConfig
		}
	}

	summaries := make([]integrationstypes.AzureServiceSummary, 0)

	definitions, err := a.azureServiceDefinitions.ListServiceDefinitions(ctx)
	if err != nil {
		return nil, model.InternalError(fmt.Errorf("couldn't list azure service definitions: %w", err))
	}

	for _, def := range definitions {
		summary := integrationstypes.AzureServiceSummary{
			DefinitionMetadata: def.DefinitionMetadata,
			Config:             nil,
		}

		summary.Config = svcConfigs[summary.Id]

		summaries = append(summaries, summary)
	}

	sort.Slice(summaries, func(i, j int) bool {
		return summaries[i].DefinitionMetadata.Title < summaries[j].DefinitionMetadata.Title
	})

	return &integrationstypes.GettableAzureServices{
		Services: summaries,
	}, nil
}

func (a *azureProvider) GetServiceDetails(ctx context.Context, req *integrationstypes.GetServiceDetailsReq) (any, error) {
	details := new(integrationstypes.GettableAzureServiceDetails)

	azureDefinition, err := a.azureServiceDefinitions.GetServiceDefinition(ctx, req.ServiceId)
	if err != nil {
		return nil, model.InternalError(fmt.Errorf("couldn't get azure service definition: %w", err))
	}

	details.AzureServiceDefinition = *azureDefinition
	if req.CloudAccountID == nil {
		return details, nil
	}

	config, err := a.getServiceConfig(ctx, &details.AzureServiceDefinition, req.OrgID, req.ServiceId, *req.CloudAccountID)
	if err != nil {
		return nil, err
	}

	details.Config = config

	// fill default values for config
	if details.Config == nil {
		cfg := new(integrationstypes.AzureCloudServiceConfig)

		logs := make([]*integrationstypes.AzureCloudServiceLogsConfig, 0)
		for _, log := range azureDefinition.Strategy.AzureLogs {
			logs = append(logs, &integrationstypes.AzureCloudServiceLogsConfig{
				Enabled: false,
				Name:    log.Name,
			})
		}

		metrics := make([]*integrationstypes.AzureCloudServiceMetricsConfig, 0)
		for _, metric := range azureDefinition.Strategy.AzureMetrics {
			metrics = append(metrics, &integrationstypes.AzureCloudServiceMetricsConfig{
				Enabled: false,
				Name:    metric.Name,
			})
		}

		cfg.Logs = logs
		cfg.Metrics = metrics

		details.Config = cfg
	}

	// TODO: write logic for getting connection status

	return details, nil
}

func (a *azureProvider) getServiceConfig(
	ctx context.Context,
	definition *integrationstypes.AzureServiceDefinition,
	orgID string,
	serviceId string,
	cloudAccountId string,
) (*integrationstypes.AzureCloudServiceConfig, error) {
	activeAccount, err := a.accountsRepo.GetConnectedCloudAccount(ctx, orgID, a.GetName().String(), cloudAccountId)
	if err != nil {
		return nil, err
	}

	configBytes, err := a.serviceConfigRepo.Get(ctx, orgID, activeAccount.ID.String(), serviceId)
	if err != nil {
		if errors.Ast(err, errors.TypeNotFound) {
			return nil, nil
		}
		return nil, err
	}

	config := new(integrationstypes.AzureCloudServiceConfig)
	err = config.Unmarshal(configBytes)
	if err != nil {
		return nil, err
	}

	for _, metric := range config.Metrics {
		if metric.Enabled {
			definition.PopulateDashboardURLs(serviceId)
			break
		}
	}

	return config, nil
}

func (a *azureProvider) GenerateConnectionArtifact(ctx context.Context, req *integrationstypes.PostableConnectionArtifact) (any, error) {
	connection := new(integrationstypes.PostableAzureConnectionCommand)

	err := connection.Unmarshal(req.Data)
	if err != nil {
		return nil, errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to unmarshal request data into Azure connection config")
	}

	// validate connection config
	if connection.AccountConfig != nil {
		if !integrationstypes.ValidAzureRegions[connection.AccountConfig.DeploymentRegion] {
			return nil, errors.NewInvalidInputf(CodeInvalidAzureRegion, "invalid azure region: %s",
				connection.AccountConfig.DeploymentRegion,
			)
		}
	}

	config, err := connection.AccountConfig.Marshal()
	if err != nil {
		return nil, err
	}

	account, err := a.accountsRepo.Upsert(
		ctx, req.OrgID, a.GetName().String(), nil, config,
		nil, nil, nil,
	)
	if err != nil {
		return nil, err
	}

	agentVersion := "v0.0.1"

	if connection.AgentConfig.Version != "" {
		agentVersion = connection.AgentConfig.Version
	}

	// TODO: improve the command and set url
	cliCommand := []string{"az", "stack", "sub", "create", "--name", "SigNozIntegration", "--location",
		connection.AccountConfig.DeploymentRegion, "--template-uri", fmt.Sprintf("<url>%s", agentVersion),
		"--action-on-unmanage", "deleteAll", "--deny-settings-mode", "denyDelete", "--parameters", fmt.Sprintf("rgName=%s", "signoz-integration-rg"),
		fmt.Sprintf("rgLocation=%s", connection.AccountConfig.DeploymentRegion)}

	return &integrationstypes.GettableAzureConnectionCommand{
		AccountId:                   account.ID.String(),
		AzureShellConnectionCommand: "az create",
		AzureCliConnectionCommand:   strings.Join(cliCommand, " "),
	}, nil
}
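
// Illustration only (assumed values; the template URL is elided here just as
// it is in the code above): for deployment region "eastus" and agent version
// v0.0.1, the joined CLI command returned above reads roughly as follows.
var exampleAzureCliConnectionCommand = strings.Join([]string{
	"az", "stack", "sub", "create",
	"--name", "SigNozIntegration",
	"--location", "eastus",
	"--template-uri", "<url>v0.0.1",
	"--action-on-unmanage", "deleteAll",
	"--deny-settings-mode", "denyDelete",
	"--parameters", "rgName=signoz-integration-rg", "rgLocation=eastus",
}, " ")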

func (a *azureProvider) UpdateServiceConfig(ctx context.Context, req *integrationstypes.PatchableServiceConfig) (any, error) {
	definition, err := a.azureServiceDefinitions.GetServiceDefinition(ctx, req.ServiceId)
	if err != nil {
		return nil, err
	}

	serviceConfig := new(integrationstypes.PatchableAzureCloudServiceConfig)
	err = serviceConfig.Unmarshal(req.Config)
	if err != nil {
		return nil, err
	}

	if err = serviceConfig.Config.Validate(definition); err != nil {
		return nil, err
	}

	// can only update config for a connected cloud account id
	_, err = a.accountsRepo.GetConnectedCloudAccount(
		ctx, req.OrgID, a.GetName().String(), serviceConfig.CloudAccountId,
	)
	if err != nil {
		return nil, err
	}

	serviceConfigBytes, err := serviceConfig.Config.Marshal()
	if err != nil {
		return nil, err
	}

	updatedConfig, err := a.serviceConfigRepo.Upsert(
		ctx, req.OrgID, a.GetName().String(), serviceConfig.CloudAccountId, req.ServiceId, serviceConfigBytes,
	)
	if err != nil {
		return nil, err
	}

	if err = serviceConfig.Unmarshal(updatedConfig); err != nil {
		return nil, err
	}

	return &integrationstypes.PatchServiceConfigResponse{
		ServiceId: req.ServiceId,
		Config:    serviceConfig,
	}, nil
}

func (a *azureProvider) GetAvailableDashboards(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error) {
	accountRecords, err := a.accountsRepo.ListConnected(ctx, orgID.StringValue(), a.GetName().String())
	if err != nil {
		return nil, err
	}

	// for now service dashboards are only available when metrics are enabled.
	servicesWithAvailableMetrics := map[string]*time.Time{}

	for _, ar := range accountRecords {
		if ar.AccountID == nil {
			continue
		}

		configsBySvcId, err := a.serviceConfigRepo.GetAllForAccount(ctx, orgID.StringValue(), ar.ID.StringValue())
		if err != nil {
			return nil, err
		}

		for svcId, config := range configsBySvcId {
			serviceConfig := new(integrationstypes.AzureCloudServiceConfig)
			err = serviceConfig.Unmarshal(config)
			if err != nil {
				return nil, err
			}

			if serviceConfig.Metrics != nil {
				for _, metric := range serviceConfig.Metrics {
					if metric.Enabled {
						servicesWithAvailableMetrics[svcId] = &ar.CreatedAt
						break
					}
				}
			}
		}
	}

	svcDashboards := make([]*dashboardtypes.Dashboard, 0)

	allServices, err := a.azureServiceDefinitions.ListServiceDefinitions(ctx)
	if err != nil {
		return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to list azure service definitions")
	}

	for _, svc := range allServices {
		serviceDashboardsCreatedAt := servicesWithAvailableMetrics[svc.Id]
		if serviceDashboardsCreatedAt != nil {
			svcDashboards = append(
				svcDashboards,
				integrationstypes.GetDashboardsFromAssets(svc.Id, a.GetName(), serviceDashboardsCreatedAt, svc.Assets)...,
			)
			servicesWithAvailableMetrics[svc.Id] = nil
		}
	}

	return svcDashboards, nil
}

func (a *azureProvider) GetDashboard(ctx context.Context, req *integrationstypes.GettableDashboard) (*dashboardtypes.Dashboard, error) {
	allDashboards, err := a.GetAvailableDashboards(ctx, req.OrgID)
	if err != nil {
		return nil, err
	}

	for _, dashboard := range allDashboards {
		if dashboard.ID == req.ID {
			return dashboard, nil
		}
	}

	return nil, errors.NewNotFoundf(CodeDashboardNotFound, "dashboard with id %s not found", req.ID)
}

func (a *azureProvider) UpdateAccountConfig(ctx context.Context, req *integrationstypes.PatchableAccountConfig) (any, error) {
	config := new(integrationstypes.PatchableAzureAccountConfig)

	err := config.Unmarshal(req.Data)
	if err != nil {
		return nil, err
	}

	if config.Config == nil || len(config.Config.EnabledResourceGroups) < 1 {
		return nil, errors.NewInvalidInputf(CodeInvalidAzureRegion, "azure region and resource groups must be provided")
	}

	// for azure, preserve the deployment region if it is already set
	account, err := a.accountsRepo.Get(ctx, req.OrgID, a.GetName().String(), req.AccountId)
	if err != nil {
		return nil, err
	}

	storedConfig := new(integrationstypes.AzureAccountConfig)
	err = storedConfig.Unmarshal(account.Config)
	if err != nil {
		return nil, err
	}

	if account.Config != nil {
		config.Config.DeploymentRegion = storedConfig.DeploymentRegion
	}

	configBytes, err := config.Config.Marshal()
	if err != nil {
		return nil, err
	}

	accountRecord, err := a.accountsRepo.Upsert(
		ctx, req.OrgID, a.GetName().String(), &req.AccountId, configBytes, nil, nil, nil,
	)
	if err != nil {
		return nil, err
	}

	return accountRecord.Account(a.GetName()), nil
}

func (a *azureProvider) DisconnectAccount(ctx context.Context, orgID, accountID string) (*integrationstypes.CloudIntegration, error) {
	account, err := a.accountsRepo.Get(ctx, orgID, a.GetName().String(), accountID)
	if err != nil {
		return nil, err
	}

	tsNow := time.Now()
	account, err = a.accountsRepo.Upsert(
		ctx, orgID, a.GetName().String(), &accountID, nil, nil, nil, &tsNow,
	)
	if err != nil {
		return nil, err
	}

	return account, nil
}
@@ -1,94 +1 @@
package cloudintegrations

import (
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
	"github.com/SigNoz/signoz/pkg/types"
)

type ServiceSummary struct {
	services.Metadata

	Config *types.CloudServiceConfig `json:"config"`
}

type ServiceDetails struct {
	services.Definition

	Config           *types.CloudServiceConfig `json:"config"`
	ConnectionStatus *ServiceConnectionStatus  `json:"status,omitempty"`
}

type AccountStatus struct {
	Integration AccountIntegrationStatus `json:"integration"`
}

type AccountIntegrationStatus struct {
	LastHeartbeatTsMillis *int64 `json:"last_heartbeat_ts_ms"`
}

type LogsConfig struct {
	Enabled   bool                `json:"enabled"`
	S3Buckets map[string][]string `json:"s3_buckets,omitempty"`
}

type MetricsConfig struct {
	Enabled bool `json:"enabled"`
}

type ServiceConnectionStatus struct {
	Logs    *SignalConnectionStatus `json:"logs"`
	Metrics *SignalConnectionStatus `json:"metrics"`
}

type SignalConnectionStatus struct {
	LastReceivedTsMillis int64  `json:"last_received_ts_ms"` // epoch milliseconds
	LastReceivedFrom     string `json:"last_received_from"`  // resource identifier
}

type CompiledCollectionStrategy = services.CollectionStrategy

func NewCompiledCollectionStrategy(provider string) (*CompiledCollectionStrategy, error) {
	if provider == "aws" {
		return &CompiledCollectionStrategy{
			Provider:   "aws",
			AWSMetrics: &services.AWSMetricsStrategy{},
			AWSLogs:    &services.AWSLogsStrategy{},
		}, nil
	}
	return nil, errors.NewNotFoundf(services.CodeUnsupportedCloudProvider, "unsupported cloud provider: %s", provider)
}

// Helper for accumulating strategies for enabled services.
func AddServiceStrategy(serviceType string, cs *CompiledCollectionStrategy,
	definitionStrat *services.CollectionStrategy, config *types.CloudServiceConfig) error {
	if definitionStrat.Provider != cs.Provider {
		return errors.NewInternalf(CodeMismatchCloudProvider, "can't add %s service strategy to compiled strategy for %s",
			definitionStrat.Provider, cs.Provider)
	}

	if cs.Provider == "aws" {
		if config.Logs != nil && config.Logs.Enabled {
			if serviceType == services.S3Sync {
				// S3 bucket sync; No cloudwatch logs are appended for this service type;
				// Though definition is populated with a custom cloudwatch group that helps in calculating logs connection status
				cs.S3Buckets = config.Logs.S3Buckets
			} else if definitionStrat.AWSLogs != nil { // services that includes a logs subscription
				cs.AWSLogs.Subscriptions = append(
					cs.AWSLogs.Subscriptions,
					definitionStrat.AWSLogs.Subscriptions...,
				)
			}
		}
		if config.Metrics != nil && config.Metrics.Enabled && definitionStrat.AWSMetrics != nil {
			cs.AWSMetrics.StreamFilters = append(
				cs.AWSMetrics.StreamFilters,
				definitionStrat.AWSMetrics.StreamFilters...,
			)
		}

		return nil
	}

	return errors.NewNotFoundf(services.CodeUnsupportedCloudProvider, "unsupported cloud provider: %s", cs.Provider)
}
pkg/query-service/app/cloudintegrations/providerregistry.go
@@ -0,0 +1,26 @@
package cloudintegrations

import (
	"log/slog"

	"github.com/SigNoz/signoz/pkg/querier"
	"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/implawsprovider"
	"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/implazureprovider"
	integrationstore "github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/store"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/SigNoz/signoz/pkg/types/integrationstypes"
)

func NewCloudProviderRegistry(logger *slog.Logger, store sqlstore.SQLStore, querier querier.Querier) map[integrationstypes.CloudProviderType]integrationstypes.CloudProvider {
	registry := make(map[integrationstypes.CloudProviderType]integrationstypes.CloudProvider)

	accountsRepo := integrationstore.NewCloudProviderAccountsRepository(store)
	serviceConfigRepo := integrationstore.NewServiceConfigRepository(store)

	awsProviderImpl := implawsprovider.NewAWSCloudProvider(logger, accountsRepo, serviceConfigRepo, querier)
	registry[integrationstypes.CloudProviderAWS] = awsProviderImpl
	azureProviderImpl := implazureprovider.NewAzureCloudProvider(logger, accountsRepo, serviceConfigRepo, querier)
	registry[integrationstypes.CloudProviderAzure] = azureProviderImpl

	return registry
}
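
// Hedged sketch, not part of this file: how a caller holding the registry
// returned by NewCloudProviderRegistry might resolve a provider by type.
// The helper name and the error text are illustrative only.
func providerFromRegistry(
	registry map[integrationstypes.CloudProviderType]integrationstypes.CloudProvider,
	providerType integrationstypes.CloudProviderType,
) (integrationstypes.CloudProvider, error) {
	provider, ok := registry[providerType]
	if !ok {
		return nil, fmt.Errorf("unsupported cloud provider: %s", providerType.String())
	}
	return provider, nil
}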
@@ -7,6 +7,24 @@
|
||||
"metrics": true,
|
||||
"logs": false
|
||||
},
|
||||
"ingestion_status_check": {
|
||||
"metrics": [
|
||||
{
|
||||
"category": "$default",
|
||||
"display_name": "Default",
|
||||
"checks": [
|
||||
{
|
||||
"key": "aws_ApplicationELB_ConsumedLCUs_count",
|
||||
"attributes": []
|
||||
},
|
||||
{
|
||||
"key": "aws_ApplicationELB_ProcessedBytes_sum",
|
||||
"attributes": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"data_collected": {
|
||||
"metrics": [
|
||||
{
|
||||
|
||||
@@ -7,6 +7,75 @@
|
||||
"metrics": true,
|
||||
"logs": true
|
||||
},
|
||||
"ingestion_status_check": {
|
||||
"metrics": [
|
||||
{
|
||||
"category": "rest_api",
|
||||
"display_name": "REST API Metrics",
|
||||
"checks": [
|
||||
{
|
||||
"key": "aws_ApiGateway_Count_count",
|
||||
"attributes": [
|
||||
{
|
||||
"name": "ApiName",
|
||||
"operator": "EXISTS",
|
||||
"value": ""
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"category": "http_api",
|
||||
"display_name": "HTTP API Metrics",
|
||||
"checks": [
|
||||
{
|
||||
"key": "aws_ApiGateway_Count_count",
|
||||
"attributes": [
|
||||
{
|
||||
"name": "ApiId",
|
||||
"operator": "EXISTS",
|
||||
"value": ""
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"category": "websocket_api",
|
||||
"display_name": "Websocket API Metrics",
|
||||
"checks": [
|
||||
{
|
||||
"key": "aws_ApiGateway_Count_count",
|
||||
"attributes": [
|
||||
{
|
||||
"name": "ApiId",
|
||||
"operator": "EXISTS",
|
||||
"value": ""
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"logs": [
|
||||
{
|
||||
"category": "$default",
|
||||
"display_name": "Default",
|
||||
"checks": [
|
||||
{
|
||||
"attributes": [
|
||||
{
|
||||
"name": "aws.cloudwatch.log_group_name",
|
||||
"operator": "ILIKE",
|
||||
"value": "API-Gateway%"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"data_collected": {
|
||||
"metrics": [
|
||||
{
|
||||
@@ -148,6 +217,146 @@
|
||||
"name": "aws_ApiGateway_Latency_sum",
|
||||
"unit": "Milliseconds",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_4xx_sum",
|
||||
"unit": "Bytes",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_4xx_max",
|
||||
"unit": "Bytes",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_4xx_min",
|
||||
"unit": "Bytes",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_4xx_count",
|
||||
"unit": "Bytes",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_5xx_sum",
|
||||
"unit": "Bytes",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_5xx_max",
|
||||
"unit": "Bytes",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_5xx_min",
|
||||
"unit": "Bytes",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_5xx_count",
|
||||
"unit": "Bytes",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_DataProcessed_sum",
|
||||
"unit": "Bytes",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_DataProcessed_max",
|
||||
"unit": "Bytes",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_DataProcessed_min",
|
||||
"unit": "Bytes",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_DataProcessed_count",
|
||||
"unit": "Bytes",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_ExecutionError_sum",
|
||||
"unit": "Count",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_ExecutionError_max",
|
||||
"unit": "Count",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_ExecutionError_min",
|
||||
"unit": "Count",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_ExecutionError_count",
|
||||
"unit": "Count",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_ClientError_sum",
|
||||
"unit": "Count",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_ClientError_max",
|
||||
"unit": "Count",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_ClientError_min",
|
||||
"unit": "Count",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_ClientError_count",
|
||||
"unit": "Count",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_IntegrationError_sum",
|
||||
"unit": "Count",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_IntegrationError_max",
|
||||
"unit": "Count",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_IntegrationError_min",
|
||||
"unit": "Count",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_IntegrationError_count",
|
||||
"unit": "Count",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_ConnectCount_sum",
|
||||
"unit": "Count",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_ConnectCount_max",
|
||||
"unit": "Count",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_ConnectCount_min",
|
||||
"unit": "Count",
|
||||
"type": "Gauge"
|
||||
},
|
||||
{
|
||||
"name": "aws_ApiGateway_ConnectCount_count",
|
||||
"unit": "Count",
|
||||
"type": "Gauge"
|
||||
}
|
||||
],
|
||||
"logs": [
|
||||
|
||||
@@ -7,6 +7,24 @@
|
||||
"metrics": true,
|
||||
"logs": false
|
||||
},
|
||||
"ingestion_status_check": {
|
||||
"metrics": [
|
||||
{
|
||||
"category": "$default",
|
||||
"display_name": "Default",
|
||||
"checks": [
|
||||
{
|
||||
"key": "aws_DynamoDB_AccountMaxReads_max",
|
||||
"attributes": []
|
||||
},
|
||||
{
|
||||
"key": "aws_DynamoDB_AccountProvisionedReadCapacityUtilization_max",
|
||||
"attributes": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"data_collected": {
|
||||
"metrics": [
|
||||
{
|
||||
@@ -391,4 +409,4 @@
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,6 +7,24 @@
|
||||
"metrics": true,
|
||||
"logs": false
|
||||
},
|
||||
"ingestion_status_check": {
|
||||
"metrics": [
|
||||
{
|
||||
"category": "$default",
|
||||
"display_name": "Default",
|
||||
"checks": [
|
||||
{
|
||||
"key": "aws_EC2_CPUUtilization_max",
|
||||
"attributes": []
|
||||
},
|
||||
{
|
||||
"key": "aws_EC2_NetworkIn_max",
|
||||
"attributes": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"data_collected": {
|
||||
"metrics": [
|
||||
{
|
||||
@@ -515,4 +533,4 @@
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,6 +7,81 @@
|
||||
"metrics": true,
|
||||
"logs": true
|
||||
},
|
||||
"ingestion_status_check": {
|
||||
"metrics": [
|
||||
{
|
||||
"category": "overview",
|
||||
"display_name": "Overview",
|
||||
"checks": [
|
||||
{
|
||||
"key": "aws_ECS_CPUUtilization_max",
|
||||
"attributes": []
|
||||
},
|
||||
{
|
||||
"key": "aws_ECS_MemoryUtilization_max",
|
||||
"attributes": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"category": "containerinsights",
|
||||
"display_name": "Container Insights",
|
||||
"checks": [
|
||||
{
|
||||
"key": "aws_ECS_ContainerInsights_NetworkRxBytes_max",
|
||||
"attributes": []
|
||||
},
|
||||
{
|
||||
"key": "aws_ECS_ContainerInsights_StorageReadBytes_max",
|
||||
"attributes": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"category": "enhanced_containerinsights",
|
||||
"display_name": "Enhanced Container Insights",
|
||||
"checks": [
|
||||
{
|
||||
"key": "aws_ECS_ContainerInsights_ContainerCpuUtilization_max",
|
||||
"attributes": [
|
||||
{
|
||||
"name": "TaskId",
|
||||
"operator": "EXISTS",
|
||||
"value": ""
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"key": "aws_ECS_ContainerInsights_TaskMemoryUtilization_max",
|
||||
"attributes": [
|
||||
{
|
||||
"name": "TaskId",
|
||||
"operator": "EXISTS",
|
||||
"value": ""
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"logs": [
|
||||
{
|
||||
"category": "$default",
|
||||
"display_name": "Default",
|
||||
"checks": [
|
||||
{
|
||||
"attributes": [
|
||||
{
|
||||
"name": "aws.cloudwatch.log_group_name",
|
||||
"operator": "ILIKE",
|
||||
"value": "%/ecs/%"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"data_collected": {
|
||||
"metrics": [
|
||||
{
|
||||
|
||||
@@ -7,6 +7,20 @@
|
||||
"metrics": true,
|
||||
"logs": false
|
||||
},
|
||||
"ingestion_status_check": {
|
||||
"metrics": [
|
||||
{
|
||||
"category": "$default",
|
||||
"display_name": "Default",
|
||||
"checks": [
|
||||
{
|
||||
"key": "aws_ElastiCache_CacheHitRate_max",
|
||||
"attributes": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"data_collected": {
|
||||
"metrics":[
|
||||
{
|
||||
@@ -1928,7 +1942,7 @@
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"telemetry_collection_strategy": {
|
||||
@@ -1951,4 +1965,4 @@
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,6 +7,37 @@
|
||||
"metrics": true,
|
||||
"logs": true
|
||||
},
|
||||
"ingestion_status_check": {
|
||||
"metrics": [
|
||||
{
|
||||
"category": "$default",
|
||||
"display_name": "Default",
|
||||
"checks": [
|
||||
{
|
||||
"key": "aws_Lambda_Invocations_sum",
|
||||
"attributes": []
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"logs": [
|
||||
{
|
||||
"category": "$default",
|
||||
"display_name": "Default",
|
||||
"checks": [
|
||||
{
|
||||
"attributes": [
|
||||
{
|
||||
"name": "aws.cloudwatch.log_group_name",
|
||||
"operator": "ILIKE",
|
||||
"value": "/aws/lambda%"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"data_collected": {
|
||||
"metrics": [
|
||||
{
|
||||
|
||||
@@ -7,6 +7,20 @@
|
||||
"metrics": true,
|
||||
"logs": false
|
||||
},
|
||||
"ingestion_status_check": {
|
||||
"metrics": [
|
||||
{
|
||||
"category": "$default",
|
||||
"display_name": "Default",
|
||||
"checks": [
|
||||
{
|
||||
"key": "aws_Kafka_KafkaDataLogsDiskUsed_max",
|
||||
"attributes": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"data_collected": {
|
||||
"metrics": [
|
||||
{
|
||||
@@ -1088,4 +1102,3 @@
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,6 +7,37 @@
|
||||
"metrics": true,
|
||||
"logs": true
|
||||
},
|
||||
"ingestion_status_check": {
|
||||
"metrics": [
|
||||
{
|
||||
"category": "$default",
|
||||
"display_name": "Default",
|
||||
"checks": [
|
||||
{
|
||||
"key": "aws_RDS_CPUUtilization_max",
|
||||
"attributes": []
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"logs": [
|
||||
{
|
||||
"category": "$default",
|
||||
"display_name": "Default",
|
||||
"checks": [
|
||||
{
|
||||
"attributes": [
|
||||
{
|
||||
"name": "resources.aws.cloudwatch.log_group_name",
|
||||
"operator": "ILIKE",
|
||||
"value": "/aws/rds%"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"data_collected": {
|
||||
"metrics": [
|
||||
{
|
||||
@@ -800,4 +831,4 @@
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,6 +7,20 @@
|
||||
"metrics": true,
|
||||
"logs": false
|
||||
},
|
||||
"ingestion_status_check": {
|
||||
"metrics": [
|
||||
{
|
||||
"category": "$default",
|
||||
"display_name": "Default",
|
||||
"checks": [
|
||||
{
|
||||
"key": "aws_SNS_NumberOfMessagesPublished_sum",
|
||||
"attributes": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"data_collected": {
|
||||
"metrics": [
|
||||
{
|
||||
@@ -127,4 +141,4 @@
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,6 +7,24 @@
|
||||
"metrics": true,
|
||||
"logs": false
|
||||
},
|
||||
"ingestion_status_check": {
|
||||
"metrics": [
|
||||
{
|
||||
"category": "$default",
|
||||
"display_name": "Default",
|
||||
"checks": [
|
||||
{
|
||||
"key": "aws_SQS_SentMessageSize_max",
|
||||
"attributes": []
|
||||
},
|
||||
{
|
||||
"key": "aws_SQS_NumberOfMessagesSent_sum",
|
||||
"attributes": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"data_collected": {
|
||||
"metrics": [
|
||||
{
|
||||
@@ -247,4 +265,4 @@
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,3 @@
{

}
File diff suppressed because it is too large
@@ -0,0 +1 @@
<svg id="f2f04349-8aee-4413-84c9-a9053611b319" xmlns="http://www.w3.org/2000/svg" width="18" height="18" viewBox="0 0 18 18"><defs><linearGradient id="ad4c4f96-09aa-4f91-ba10-5cb8ad530f74" x1="9" y1="15.83" x2="9" y2="5.79" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="#b3b3b3" /><stop offset="0.26" stop-color="#c1c1c1" /><stop offset="1" stop-color="#e6e6e6" /></linearGradient></defs><title>Icon-storage-86</title><path d="M.5,5.79h17a0,0,0,0,1,0,0v9.48a.57.57,0,0,1-.57.57H1.07a.57.57,0,0,1-.57-.57V5.79A0,0,0,0,1,.5,5.79Z" fill="url(#ad4c4f96-09aa-4f91-ba10-5cb8ad530f74)" /><path d="M1.07,2.17H16.93a.57.57,0,0,1,.57.57V5.79a0,0,0,0,1,0,0H.5a0,0,0,0,1,0,0V2.73A.57.57,0,0,1,1.07,2.17Z" fill="#37c2b1" /><path d="M2.81,6.89H15.18a.27.27,0,0,1,.26.27v1.4a.27.27,0,0,1-.26.27H2.81a.27.27,0,0,1-.26-.27V7.16A.27.27,0,0,1,2.81,6.89Z" fill="#fff" /><path d="M2.82,9.68H15.19a.27.27,0,0,1,.26.27v1.41a.27.27,0,0,1-.26.27H2.82a.27.27,0,0,1-.26-.27V10A.27.27,0,0,1,2.82,9.68Z" fill="#37c2b1" /><path d="M2.82,12.5H15.19a.27.27,0,0,1,.26.27v1.41a.27.27,0,0,1-.26.27H2.82a.27.27,0,0,1-.26-.27V12.77A.27.27,0,0,1,2.82,12.5Z" fill="#258277" /></svg>
@@ -0,0 +1,252 @@
|
||||
{
|
||||
"id": "blobstorage",
|
||||
"title": "Blob Storage",
|
||||
"icon": "file://icon.svg",
|
||||
"overview": "file://overview.md",
|
||||
"supported_signals": {
|
||||
"metrics": true,
|
||||
"logs": true
|
||||
},
|
||||
"data_collected": {
|
||||
"metrics": [
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
}
|
||||
],
|
||||
"logs": [
|
||||
{
|
||||
"name": "placeholder_log_1",
|
||||
"path": "placeholder.path.value",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"name": "placeholder_log_1",
|
||||
"path": "placeholder.path.value",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"name": "placeholder_log_1",
|
||||
"path": "placeholder.path.value",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"name": "placeholder_log_1",
|
||||
"path": "placeholder.path.value",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"telemetry_collection_strategy": {
|
||||
"azure_metrics": [
|
||||
{
|
||||
"category_type": "metrics",
|
||||
"name": "Capacity"
|
||||
},
|
||||
{
|
||||
"category_type": "metrics",
|
||||
"name": "Transaction"
|
||||
}
|
||||
],
|
||||
"azure_logs": [
|
||||
{
|
||||
"category_type": "logs",
|
||||
"name": "StorageRead"
|
||||
},
|
||||
{
|
||||
"category_type": "logs",
|
||||
"name": "StorageWrite"
|
||||
},
|
||||
{
|
||||
"category_type": "logs",
|
||||
"name": "StorageDelete"
|
||||
}
|
||||
]
|
||||
},
|
||||
"assets": {
|
||||
"dashboards": [
|
||||
{
|
||||
"id": "overview",
|
||||
"title": "Blob Storage Overview",
|
||||
"description": "Overview of Blob Storage",
|
||||
"definition": "file://assets/dashboards/overview.json"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,2 @@
Monitor Azure Blob Storage with SigNoz
Collect key Blob Storage metrics and view them with an out-of-the-box dashboard.
@@ -0,0 +1,3 @@
{

}
File diff suppressed because it is too large
@@ -0,0 +1 @@
<svg id="f2f04349-8aee-4413-84c9-a9053611b319" xmlns="http://www.w3.org/2000/svg" width="18" height="18" viewBox="0 0 18 18"><defs><linearGradient id="ad4c4f96-09aa-4f91-ba10-5cb8ad530f74" x1="9" y1="15.83" x2="9" y2="5.79" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="#b3b3b3" /><stop offset="0.26" stop-color="#c1c1c1" /><stop offset="1" stop-color="#e6e6e6" /></linearGradient></defs><title>Icon-storage-86</title><path d="M.5,5.79h17a0,0,0,0,1,0,0v9.48a.57.57,0,0,1-.57.57H1.07a.57.57,0,0,1-.57-.57V5.79A0,0,0,0,1,.5,5.79Z" fill="url(#ad4c4f96-09aa-4f91-ba10-5cb8ad530f74)" /><path d="M1.07,2.17H16.93a.57.57,0,0,1,.57.57V5.79a0,0,0,0,1,0,0H.5a0,0,0,0,1,0,0V2.73A.57.57,0,0,1,1.07,2.17Z" fill="#37c2b1" /><path d="M2.81,6.89H15.18a.27.27,0,0,1,.26.27v1.4a.27.27,0,0,1-.26.27H2.81a.27.27,0,0,1-.26-.27V7.16A.27.27,0,0,1,2.81,6.89Z" fill="#fff" /><path d="M2.82,9.68H15.19a.27.27,0,0,1,.26.27v1.41a.27.27,0,0,1-.26.27H2.82a.27.27,0,0,1-.26-.27V10A.27.27,0,0,1,2.82,9.68Z" fill="#37c2b1" /><path d="M2.82,12.5H15.19a.27.27,0,0,1,.26.27v1.41a.27.27,0,0,1-.26.27H2.82a.27.27,0,0,1-.26-.27V12.77A.27.27,0,0,1,2.82,12.5Z" fill="#258277" /></svg>
@@ -0,0 +1,247 @@
|
||||
{
|
||||
"id": "frontdoor",
|
||||
"title": "Front Door",
|
||||
"icon": "file://icon.svg",
|
||||
"overview": "file://overview.md",
|
||||
"supported_signals": {
|
||||
"metrics": true,
|
||||
"logs": true
|
||||
},
|
||||
"data_collected": {
|
||||
"metrics": [
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
},
|
||||
{
|
||||
"name": "placeholder_metric_1",
|
||||
"unit": "Percent",
|
||||
"type": "Gauge",
|
||||
"description": ""
|
||||
}
|
||||
],
|
||||
"logs": [
|
||||
{
|
||||
"name": "placeholder_log_1",
|
||||
"path": "placeholder.path.value",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"name": "placeholder_log_1",
|
||||
"path": "placeholder.path.value",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"name": "placeholder_log_1",
|
||||
"path": "placeholder.path.value",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"telemetry_collection_strategy": {
|
||||
"azure_metrics": [
|
||||
{
|
||||
"category_type": "metrics",
|
||||
"name": "Capacity"
|
||||
},
|
||||
{
|
||||
"category_type": "metrics",
|
||||
"name": "Transaction"
|
||||
}
|
||||
],
|
||||
"azure_logs": [
|
||||
{
|
||||
"category_type": "logs",
|
||||
"name": "StorageRead"
|
||||
},
|
||||
{
|
||||
"category_type": "logs",
|
||||
"name": "StorageWrite"
|
||||
},
|
||||
{
|
||||
"category_type": "logs",
|
||||
"name": "StorageDelete"
|
||||
}
|
||||
]
|
||||
},
|
||||
"assets": {
|
||||
"dashboards": [
|
||||
{
|
||||
"id": "overview",
|
||||
"title": "Front Door Overview",
|
||||
"description": "Overview of Blob Storage",
|
||||
"definition": "file://assets/dashboards/overview.json"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,2 @@
Monitor Azure Front Door with SigNoz
Collect key Front Door metrics and view them with an out-of-the-box dashboard.
@@ -1,91 +1 @@
package services

import (
    "github.com/SigNoz/signoz/pkg/types/dashboardtypes"
)

type Metadata struct {
    Id    string `json:"id"`
    Title string `json:"title"`
    Icon  string `json:"icon"`
}

type Definition struct {
    Metadata

    Overview string `json:"overview"` // markdown

    Assets Assets `json:"assets"`

    SupportedSignals SupportedSignals `json:"supported_signals"`

    DataCollected DataCollected `json:"data_collected"`

    Strategy *CollectionStrategy `json:"telemetry_collection_strategy"`
}

type Assets struct {
    Dashboards []Dashboard `json:"dashboards"`
}

type SupportedSignals struct {
    Logs    bool `json:"logs"`
    Metrics bool `json:"metrics"`
}

type DataCollected struct {
    Logs    []CollectedLogAttribute `json:"logs"`
    Metrics []CollectedMetric       `json:"metrics"`
}

type CollectedLogAttribute struct {
    Name string `json:"name"`
    Path string `json:"path"`
    Type string `json:"type"`
}

type CollectedMetric struct {
    Name        string `json:"name"`
    Type        string `json:"type"`
    Unit        string `json:"unit"`
    Description string `json:"description"`
}

type CollectionStrategy struct {
    Provider string `json:"provider"`

    AWSMetrics *AWSMetricsStrategy `json:"aws_metrics,omitempty"`
    AWSLogs    *AWSLogsStrategy    `json:"aws_logs,omitempty"`
    S3Buckets  map[string][]string `json:"s3_buckets,omitempty"` // Only available in S3 Sync Service Type
}

type AWSMetricsStrategy struct {
    // to be used as https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-metricstream.html#cfn-cloudwatch-metricstream-includefilters
    StreamFilters []struct {
        // json tags here are in the shape expected by AWS API as detailed at
        // https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-metricstream-metricstreamfilter.html
        Namespace   string   `json:"Namespace"`
        MetricNames []string `json:"MetricNames,omitempty"`
    } `json:"cloudwatch_metric_stream_filters"`
}

type AWSLogsStrategy struct {
    Subscriptions []struct {
        // subscribe to all logs groups with specified prefix.
        // eg: `/aws/rds/`
        LogGroupNamePrefix string `json:"log_group_name_prefix"`

        // https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/FilterAndPatternSyntax.html
        // "" implies no filtering is required.
        FilterPattern string `json:"filter_pattern"`
    } `json:"cloudwatch_logs_subscriptions"`
}

type Dashboard struct {
    Id          string                                `json:"id"`
    Url         string                                `json:"url"`
    Title       string                                `json:"title"`
    Description string                                `json:"description"`
    Image       string                                `json:"image"`
    Definition  *dashboardtypes.StorableDashboardData `json:"definition,omitempty"`
}

@@ -2,128 +2,128 @@ package services

import (
    "bytes"
    "context"
    "embed"
    "encoding/json"
    "fmt"
    "io/fs"
    "path"
    "sort"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/query-service/app/integrations"
    "github.com/SigNoz/signoz/pkg/query-service/model"
    "github.com/SigNoz/signoz/pkg/types/integrationstypes"
    "github.com/SigNoz/signoz/pkg/valuer"
    koanfJson "github.com/knadh/koanf/parsers/json"
    "golang.org/x/exp/maps"
)

const (
    S3Sync = "s3sync"
)

var (
    CodeUnsupportedCloudProvider  = errors.MustNewCode("unsupported_cloud_provider")
    CodeUnsupportedServiceType    = errors.MustNewCode("unsupported_service_type")
    CodeServiceDefinitionNotFound = errors.MustNewCode("service_definition_not_found")
)

func List(cloudProvider string) ([]Definition, *model.ApiError) {
    cloudServices, found := supportedServices[cloudProvider]
    if !found || cloudServices == nil {
        return nil, model.NotFoundError(fmt.Errorf(
            "unsupported cloud provider: %s", cloudProvider,
        ))
type (
    AWSServicesProvider struct {
        definitions map[string]*integrationstypes.AWSServiceDefinition
    }
    AzureServicesProvider struct {
        definitions map[string]*integrationstypes.AzureServiceDefinition
    }
)

    services := maps.Values(cloudServices)
    sort.Slice(services, func(i, j int) bool {
        return services[i].Id < services[j].Id
    })

    return services, nil
func (a *AzureServicesProvider) ListServiceDefinitions(ctx context.Context) (map[string]*integrationstypes.AzureServiceDefinition, error) {
    return a.definitions, nil
}

func Map(cloudProvider string) (map[string]Definition, error) {
    cloudServices, found := supportedServices[cloudProvider]
    if !found || cloudServices == nil {
        return nil, errors.Newf(errors.TypeNotFound, CodeUnsupportedCloudProvider, "unsupported cloud provider: %s", cloudProvider)
func (a *AzureServicesProvider) GetServiceDefinition(ctx context.Context, serviceName string) (*integrationstypes.AzureServiceDefinition, error) {
    def, ok := a.definitions[serviceName]
    if !ok {
        return nil, errors.NewNotFoundf(CodeServiceDefinitionNotFound, "azure service definition not found: %s", serviceName)
    }

    return def, nil
}

func (a *AWSServicesProvider) ListServiceDefinitions(ctx context.Context) (map[string]*integrationstypes.AWSServiceDefinition, error) {
    return a.definitions, nil
}

func (a *AWSServicesProvider) GetServiceDefinition(ctx context.Context, serviceName string) (*integrationstypes.AWSServiceDefinition, error) {
    def, ok := a.definitions[serviceName]
    if !ok {
        return nil, errors.NewNotFoundf(CodeServiceDefinitionNotFound, "aws service definition not found: %s", serviceName)
    }

    return def, nil
}

func NewAWSCloudProviderServices() (*AWSServicesProvider, error) {
    definitions, err := readAllServiceDefinitions(integrationstypes.CloudProviderAWS)
    if err != nil {
        return nil, err
    }

    serviceDefinitions := make(map[string]*integrationstypes.AWSServiceDefinition)
    for id, def := range definitions {
        typedDef, ok := def.(*integrationstypes.AWSServiceDefinition)
        if !ok {
            return nil, fmt.Errorf("invalid type for AWS service definition %s", id)
        }
        serviceDefinitions[id] = typedDef
    }

    return &AWSServicesProvider{
        definitions: serviceDefinitions,
    }, nil
}

func NewAzureCloudProviderServices() (*AzureServicesProvider, error) {
    definitions, err := readAllServiceDefinitions(integrationstypes.CloudProviderAzure)
    if err != nil {
        return nil, err
    }

    serviceDefinitions := make(map[string]*integrationstypes.AzureServiceDefinition)
    for id, def := range definitions {
        typedDef, ok := def.(*integrationstypes.AzureServiceDefinition)
        if !ok {
            return nil, fmt.Errorf("invalid type for Azure service definition %s", id)
        }
        serviceDefinitions[id] = typedDef
    }

    return &AzureServicesProvider{
        definitions: serviceDefinitions,
    }, nil
}

// End of API. Logic for reading service definition files follows

//go:embed definitions/*
var definitionFiles embed.FS

func readAllServiceDefinitions(cloudProvider valuer.String) (map[string]any, error) {
    rootDirName := "definitions"

    cloudProviderDirPath := path.Join(rootDirName, cloudProvider.String())

    cloudServices, err := readServiceDefinitionsFromDir(cloudProvider, cloudProviderDirPath)
    if err != nil {
        return nil, err
    }

    if len(cloudServices) < 1 {
        return nil, errors.NewInternalf(errors.CodeInternal, "no service definitions found in %s", cloudProviderDirPath)
    }

    return cloudServices, nil
}

func GetServiceDefinition(cloudProvider, serviceType string) (*Definition, error) {
    cloudServices := supportedServices[cloudProvider]
    if cloudServices == nil {
        return nil, errors.Newf(errors.TypeNotFound, CodeUnsupportedCloudProvider, "unsupported cloud provider: %s", cloudProvider)
    }

    svc, exists := cloudServices[serviceType]
    if !exists {
        return nil, errors.Newf(errors.TypeNotFound, CodeUnsupportedServiceType, "%s service not found: %s", cloudProvider, serviceType)
    }

    return &svc, nil
}

// End of API. Logic for reading service definition files follows

// Service details read from ./serviceDefinitions
// { "providerName": { "service_id": {...}} }
var supportedServices map[string]map[string]Definition

func init() {
    err := readAllServiceDefinitions()
    if err != nil {
        panic(fmt.Errorf(
            "couldn't read cloud service definitions: %w", err,
        ))
    }
}

//go:embed definitions/*
var definitionFiles embed.FS

func readAllServiceDefinitions() error {
    supportedServices = map[string]map[string]Definition{}

    rootDirName := "definitions"

    cloudProviderDirs, err := fs.ReadDir(definitionFiles, rootDirName)
    if err != nil {
        return fmt.Errorf("couldn't read dirs in %s: %w", rootDirName, err)
    }

    for _, d := range cloudProviderDirs {
        if !d.IsDir() {
            continue
        }

        cloudProvider := d.Name()

        cloudProviderDirPath := path.Join(rootDirName, cloudProvider)
        cloudServices, err := readServiceDefinitionsFromDir(cloudProvider, cloudProviderDirPath)
        if err != nil {
            return fmt.Errorf("couldn't read %s service definitions: %w", cloudProvider, err)
        }

        if len(cloudServices) < 1 {
            return fmt.Errorf("no %s services could be read", cloudProvider)
        }

        supportedServices[cloudProvider] = cloudServices
    }

    return nil
}

func readServiceDefinitionsFromDir(cloudProvider string, cloudProviderDirPath string) (
    map[string]Definition, error,
) {
func readServiceDefinitionsFromDir(cloudProvider valuer.String, cloudProviderDirPath string) (map[string]any, error) {
    svcDefDirs, err := fs.ReadDir(definitionFiles, cloudProviderDirPath)
    if err != nil {
        return nil, fmt.Errorf("couldn't list integrations dirs: %w", err)
        return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't list integrations dirs")
    }

    svcDefs := map[string]Definition{}
    svcDefs := make(map[string]any)

    for _, d := range svcDefDirs {
        if !d.IsDir() {
@@ -133,103 +133,73 @@ func readServiceDefinitionsFromDir(cloudProvider string, cloudProviderDirPath st
        svcDirPath := path.Join(cloudProviderDirPath, d.Name())
        s, err := readServiceDefinition(cloudProvider, svcDirPath)
        if err != nil {
            return nil, fmt.Errorf("couldn't read svc definition for %s: %w", d.Name(), err)
            return nil, err
        }

        _, exists := svcDefs[s.Id]
        _, exists := svcDefs[s.GetId()]
        if exists {
            return nil, fmt.Errorf(
                "duplicate service definition for id %s at %s", s.Id, d.Name(),
            )
            return nil, errors.NewInternalf(errors.CodeInternal, "duplicate service definition for id %s at %s", s.GetId(), d.Name())
        }
        svcDefs[s.Id] = *s
        svcDefs[s.GetId()] = s
    }

    return svcDefs, nil
}

func readServiceDefinition(cloudProvider string, svcDirpath string) (*Definition, error) {
func readServiceDefinition(cloudProvider valuer.String, svcDirpath string) (integrationstypes.Definition, error) {
    integrationJsonPath := path.Join(svcDirpath, "integration.json")

    serializedSpec, err := definitionFiles.ReadFile(integrationJsonPath)
    if err != nil {
        return nil, fmt.Errorf(
            "couldn't find integration.json in %s: %w",
            svcDirpath, err,
        )
        return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't read integration definition in %s", svcDirpath)
    }

    integrationSpec, err := koanfJson.Parser().Unmarshal(serializedSpec)
    if err != nil {
        return nil, fmt.Errorf(
            "couldn't parse integration.json from %s: %w",
            integrationJsonPath, err,
        )
        return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't parse integration definition in %s", svcDirpath)
    }

    hydrated, err := integrations.HydrateFileUris(
        integrationSpec, definitionFiles, svcDirpath,
    )
    hydrated, err := integrations.HydrateFileUris(integrationSpec, definitionFiles, svcDirpath)
    if err != nil {
        return nil, fmt.Errorf(
            "couldn't hydrate files referenced in service definition %s: %w",
            integrationJsonPath, err,
        )
        return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't hydrate integration definition in %s", svcDirpath)
    }
    hydratedSpec := hydrated.(map[string]any)

    serviceDef, err := ParseStructWithJsonTagsFromMap[Definition](hydratedSpec)
    if err != nil {
        return nil, fmt.Errorf(
            "couldn't parse hydrated JSON spec read from %s: %w",
            integrationJsonPath, err,
        )
    var serviceDef integrationstypes.Definition

    switch cloudProvider {
    case integrationstypes.CloudProviderAWS:
        serviceDef = &integrationstypes.AWSServiceDefinition{}
    case integrationstypes.CloudProviderAzure:
        serviceDef = &integrationstypes.AzureServiceDefinition{}
    default:
        // ideally this shouldn't happen, hence throwing an internal error
        return nil, errors.NewInternalf(errors.CodeInternal, "unsupported cloud provider: %s", cloudProvider)
    }

    err = validateServiceDefinition(serviceDef)
    err = parseStructWithJsonTagsFromMap(hydratedSpec, serviceDef)
    if err != nil {
        return nil, fmt.Errorf("invalid service definition %s: %w", serviceDef.Id, err)
        return nil, err
    }
    err = serviceDef.Validate()
    if err != nil {
        return nil, err
    }

    serviceDef.Strategy.Provider = cloudProvider

    return serviceDef, nil

}

func validateServiceDefinition(s *Definition) error {
    // Validate dashboard data
    seenDashboardIds := map[string]interface{}{}
    for _, dd := range s.Assets.Dashboards {
        if _, seen := seenDashboardIds[dd.Id]; seen {
            return fmt.Errorf("multiple dashboards found with id %s", dd.Id)
        }
        seenDashboardIds[dd.Id] = nil
    }

    if s.Strategy == nil {
        return fmt.Errorf("telemetry_collection_strategy is required")
    }

    // potentially more to follow

    return nil
}

func ParseStructWithJsonTagsFromMap[StructType any](data map[string]any) (
    *StructType, error,
) {
func parseStructWithJsonTagsFromMap(data map[string]any, target interface{}) error {
    mapJson, err := json.Marshal(data)
    if err != nil {
        return nil, fmt.Errorf("couldn't marshal map to json: %w", err)
        return errors.WrapInternalf(err, errors.CodeInternal, "couldn't marshal service definition json data")
    }

    var res StructType
    decoder := json.NewDecoder(bytes.NewReader(mapJson))
    decoder.DisallowUnknownFields()
    err = decoder.Decode(&res)
    err = decoder.Decode(target)
    if err != nil {
        return nil, fmt.Errorf("couldn't unmarshal json back to struct: %w", err)
        return errors.WrapInternalf(err, errors.CodeInternal, "couldn't unmarshal service definition json data")
    }
    return &res, nil
    return nil
}

@@ -1,35 +1,3 @@
package services

import (
    "testing"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/query-service/model"
    "github.com/stretchr/testify/require"
)

func TestAvailableServices(t *testing.T) {
    require := require.New(t)

    // should be able to list available services.
    _, apiErr := List("bad-cloud-provider")
    require.NotNil(apiErr)
    require.Equal(model.ErrorNotFound, apiErr.Type())

    awsSvcs, apiErr := List("aws")
    require.Nil(apiErr)
    require.Greater(len(awsSvcs), 0)

    // should be able to get details of a service
    _, err := GetServiceDefinition(
        "aws", "bad-service-id",
    )
    require.NotNil(err)
    require.True(errors.Ast(err, errors.TypeNotFound))

    svc, err := GetServiceDefinition(
        "aws", awsSvcs[0].Id,
    )
    require.Nil(err)
    require.Equal(*svc, awsSvcs[0])
}
// TODO: add more tests for services package

@@ -1,4 +1,4 @@
|
||||
package cloudintegrations
|
||||
package store
|
||||
|
||||
import (
|
||||
"context"
|
||||
@@ -7,49 +7,50 @@ import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type cloudProviderAccountsRepository interface {
|
||||
listConnected(ctx context.Context, orgId string, provider string) ([]types.CloudIntegration, *model.ApiError)
|
||||
var (
|
||||
CodeCloudIntegrationAccountNotFound errors.Code = errors.MustNewCode("cloud_integration_account_not_found")
|
||||
)
|
||||
|
||||
get(ctx context.Context, orgId string, provider string, id string) (*types.CloudIntegration, *model.ApiError)
|
||||
type CloudProviderAccountsRepository interface {
|
||||
ListConnected(ctx context.Context, orgId string, provider string) ([]integrationstypes.CloudIntegration, error)
|
||||
|
||||
getConnectedCloudAccount(ctx context.Context, orgId string, provider string, accountID string) (*types.CloudIntegration, *model.ApiError)
|
||||
Get(ctx context.Context, orgId string, provider string, id string) (*integrationstypes.CloudIntegration, error)
|
||||
|
||||
GetConnectedCloudAccount(ctx context.Context, orgId string, provider string, accountID string) (*integrationstypes.CloudIntegration, error)
|
||||
|
||||
// Insert an account or update it by (cloudProvider, id)
|
||||
// for specified non-empty fields
|
||||
upsert(
|
||||
Upsert(
|
||||
ctx context.Context,
|
||||
orgId string,
|
||||
provider string,
|
||||
id *string,
|
||||
config *types.AccountConfig,
|
||||
config []byte,
|
||||
accountId *string,
|
||||
agentReport *types.AgentReport,
|
||||
agentReport *integrationstypes.AgentReport,
|
||||
removedAt *time.Time,
|
||||
) (*types.CloudIntegration, *model.ApiError)
|
||||
) (*integrationstypes.CloudIntegration, error)
|
||||
}
|
||||
|
||||
func newCloudProviderAccountsRepository(store sqlstore.SQLStore) (
|
||||
*cloudProviderAccountsSQLRepository, error,
|
||||
) {
|
||||
return &cloudProviderAccountsSQLRepository{
|
||||
store: store,
|
||||
}, nil
|
||||
func NewCloudProviderAccountsRepository(store sqlstore.SQLStore) CloudProviderAccountsRepository {
|
||||
return &cloudProviderAccountsSQLRepository{store: store}
|
||||
}
|
||||
|
||||
type cloudProviderAccountsSQLRepository struct {
|
||||
store sqlstore.SQLStore
|
||||
}
|
||||
|
||||
func (r *cloudProviderAccountsSQLRepository) listConnected(
|
||||
func (r *cloudProviderAccountsSQLRepository) ListConnected(
|
||||
ctx context.Context, orgId string, cloudProvider string,
|
||||
) ([]types.CloudIntegration, *model.ApiError) {
|
||||
accounts := []types.CloudIntegration{}
|
||||
) ([]integrationstypes.CloudIntegration, error) {
|
||||
accounts := []integrationstypes.CloudIntegration{}
|
||||
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&accounts).
|
||||
@@ -62,18 +63,16 @@ func (r *cloudProviderAccountsSQLRepository) listConnected(
|
||||
Scan(ctx)
|
||||
|
||||
if err != nil {
|
||||
return nil, model.InternalError(fmt.Errorf(
|
||||
"could not query connected cloud accounts: %w", err,
|
||||
))
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "could not query connected cloud accounts")
|
||||
}
|
||||
|
||||
return accounts, nil
|
||||
}
|
||||
|
||||
func (r *cloudProviderAccountsSQLRepository) get(
|
||||
func (r *cloudProviderAccountsSQLRepository) Get(
|
||||
ctx context.Context, orgId string, provider string, id string,
|
||||
) (*types.CloudIntegration, *model.ApiError) {
|
||||
var result types.CloudIntegration
|
||||
) (*integrationstypes.CloudIntegration, error) {
|
||||
var result integrationstypes.CloudIntegration
|
||||
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&result).
|
||||
@@ -82,23 +81,25 @@ func (r *cloudProviderAccountsSQLRepository) get(
|
||||
Where("id = ?", id).
|
||||
Scan(ctx)
|
||||
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, model.NotFoundError(fmt.Errorf(
|
||||
"couldn't find account with Id %s", id,
|
||||
))
|
||||
} else if err != nil {
|
||||
return nil, model.InternalError(fmt.Errorf(
|
||||
"couldn't query cloud provider accounts: %w", err,
|
||||
))
|
||||
if err != nil {
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
return nil, errors.WrapNotFoundf(
|
||||
err,
|
||||
CodeCloudIntegrationAccountNotFound,
|
||||
"couldn't find account with Id %s", id,
|
||||
)
|
||||
}
|
||||
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query cloud provider account")
|
||||
}
|
||||
|
||||
return &result, nil
|
||||
}
|
||||
|
||||
func (r *cloudProviderAccountsSQLRepository) getConnectedCloudAccount(
|
||||
func (r *cloudProviderAccountsSQLRepository) GetConnectedCloudAccount(
|
||||
ctx context.Context, orgId string, provider string, accountId string,
|
||||
) (*types.CloudIntegration, *model.ApiError) {
|
||||
var result types.CloudIntegration
|
||||
) (*integrationstypes.CloudIntegration, error) {
|
||||
var result integrationstypes.CloudIntegration
|
||||
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&result).
|
||||
@@ -109,29 +110,25 @@ func (r *cloudProviderAccountsSQLRepository) getConnectedCloudAccount(
|
||||
Where("removed_at is NULL").
|
||||
Scan(ctx)
|
||||
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, model.NotFoundError(fmt.Errorf(
|
||||
"couldn't find connected cloud account %s", accountId,
|
||||
))
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
return nil, errors.WrapNotFoundf(err, CodeCloudIntegrationAccountNotFound, "couldn't find connected cloud account %s", accountId)
|
||||
} else if err != nil {
|
||||
return nil, model.InternalError(fmt.Errorf(
|
||||
"couldn't query cloud provider accounts: %w", err,
|
||||
))
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query cloud provider account")
|
||||
}
|
||||
|
||||
return &result, nil
|
||||
}
|
||||
|
||||
func (r *cloudProviderAccountsSQLRepository) upsert(
|
||||
func (r *cloudProviderAccountsSQLRepository) Upsert(
|
||||
ctx context.Context,
|
||||
orgId string,
|
||||
provider string,
|
||||
id *string,
|
||||
config *types.AccountConfig,
|
||||
config []byte,
|
||||
accountId *string,
|
||||
agentReport *types.AgentReport,
|
||||
agentReport *integrationstypes.AgentReport,
|
||||
removedAt *time.Time,
|
||||
) (*types.CloudIntegration, *model.ApiError) {
|
||||
) (*integrationstypes.CloudIntegration, error) {
|
||||
// Insert
|
||||
if id == nil {
|
||||
temp := valuer.GenerateUUID().StringValue()
|
||||
@@ -181,7 +178,7 @@ func (r *cloudProviderAccountsSQLRepository) upsert(
|
||||
)
|
||||
}
|
||||
|
||||
integration := types.CloudIntegration{
|
||||
integration := integrationstypes.CloudIntegration{
|
||||
OrgID: orgId,
|
||||
Provider: provider,
|
||||
Identifiable: types.Identifiable{ID: valuer.MustNewUUID(*id)},
|
||||
@@ -195,22 +192,18 @@ func (r *cloudProviderAccountsSQLRepository) upsert(
|
||||
RemovedAt: removedAt,
|
||||
}
|
||||
|
||||
_, dbErr := r.store.BunDB().NewInsert().
|
||||
_, err := r.store.BunDB().NewInsert().
|
||||
Model(&integration).
|
||||
On(onConflictClause).
|
||||
Exec(ctx)
|
||||
|
||||
if dbErr != nil {
|
||||
return nil, model.InternalError(fmt.Errorf(
|
||||
"could not upsert cloud account record: %w", dbErr,
|
||||
))
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't upsert cloud integration account")
|
||||
}
|
||||
|
||||
upsertedAccount, apiErr := r.get(ctx, orgId, provider, *id)
|
||||
if apiErr != nil {
|
||||
return nil, model.InternalError(fmt.Errorf(
|
||||
"couldn't fetch upserted account by id: %w", apiErr.ToError(),
|
||||
))
|
||||
upsertedAccount, err := r.Get(ctx, orgId, provider, *id)
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't get upserted cloud integration account")
|
||||
}
|
||||
|
||||
return upsertedAccount, nil
|
||||
@@ -1,64 +1,63 @@
|
||||
package cloudintegrations
|
||||
package store
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
var (
|
||||
CodeServiceConfigNotFound = errors.MustNewCode("service_config_not_found")
|
||||
)
|
||||
|
||||
type ServiceConfigDatabase interface {
|
||||
get(
|
||||
Get(
|
||||
ctx context.Context,
|
||||
orgID string,
|
||||
cloudAccountId string,
|
||||
serviceType string,
|
||||
) (*types.CloudServiceConfig, *model.ApiError)
|
||||
) ([]byte, error)
|
||||
|
||||
upsert(
|
||||
Upsert(
|
||||
ctx context.Context,
|
||||
orgID string,
|
||||
cloudProvider string,
|
||||
cloudAccountId string,
|
||||
serviceId string,
|
||||
config types.CloudServiceConfig,
|
||||
) (*types.CloudServiceConfig, *model.ApiError)
|
||||
config []byte,
|
||||
) ([]byte, error)
|
||||
|
||||
getAllForAccount(
|
||||
GetAllForAccount(
|
||||
ctx context.Context,
|
||||
orgID string,
|
||||
cloudAccountId string,
|
||||
) (
|
||||
configsBySvcId map[string]*types.CloudServiceConfig,
|
||||
apiErr *model.ApiError,
|
||||
map[string][]byte,
|
||||
error,
|
||||
)
|
||||
}
|
||||
|
||||
func newServiceConfigRepository(store sqlstore.SQLStore) (
|
||||
*serviceConfigSQLRepository, error,
|
||||
) {
|
||||
return &serviceConfigSQLRepository{
|
||||
store: store,
|
||||
}, nil
|
||||
func NewServiceConfigRepository(store sqlstore.SQLStore) ServiceConfigDatabase {
|
||||
return &serviceConfigSQLRepository{store: store}
|
||||
}
|
||||
|
||||
type serviceConfigSQLRepository struct {
|
||||
store sqlstore.SQLStore
|
||||
}
|
||||
|
||||
func (r *serviceConfigSQLRepository) get(
|
||||
func (r *serviceConfigSQLRepository) Get(
|
||||
ctx context.Context,
|
||||
orgID string,
|
||||
cloudAccountId string,
|
||||
serviceType string,
|
||||
) (*types.CloudServiceConfig, *model.ApiError) {
|
||||
|
||||
var result types.CloudIntegrationService
|
||||
) ([]byte, error) {
|
||||
var result integrationstypes.CloudIntegrationService
|
||||
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&result).
|
||||
@@ -67,36 +66,30 @@ func (r *serviceConfigSQLRepository) get(
|
||||
Where("ci.id = ?", cloudAccountId).
|
||||
Where("cis.type = ?", serviceType).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
return nil, errors.WrapNotFoundf(err, CodeServiceConfigNotFound, "couldn't find config for cloud account %s", cloudAccountId)
|
||||
}
|
||||
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, model.NotFoundError(fmt.Errorf(
|
||||
"couldn't find config for cloud account %s",
|
||||
cloudAccountId,
|
||||
))
|
||||
} else if err != nil {
|
||||
return nil, model.InternalError(fmt.Errorf(
|
||||
"couldn't query cloud service config: %w", err,
|
||||
))
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query cloud service config")
|
||||
}
|
||||
|
||||
return &result.Config, nil
|
||||
|
||||
return result.Config, nil
|
||||
}
|
||||
|
||||
func (r *serviceConfigSQLRepository) upsert(
|
||||
func (r *serviceConfigSQLRepository) Upsert(
|
||||
ctx context.Context,
|
||||
orgID string,
|
||||
cloudProvider string,
|
||||
cloudAccountId string,
|
||||
serviceId string,
|
||||
config types.CloudServiceConfig,
|
||||
) (*types.CloudServiceConfig, *model.ApiError) {
|
||||
|
||||
config []byte,
|
||||
) ([]byte, error) {
|
||||
// get cloud integration id from account id
|
||||
// if the account is not connected, we don't need to upsert the config
|
||||
var cloudIntegrationId string
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model((*types.CloudIntegration)(nil)).
|
||||
Model((*integrationstypes.CloudIntegration)(nil)).
|
||||
Column("id").
|
||||
Where("provider = ?", cloudProvider).
|
||||
Where("account_id = ?", cloudAccountId).
|
||||
@@ -104,14 +97,18 @@ func (r *serviceConfigSQLRepository) upsert(
|
||||
Where("removed_at is NULL").
|
||||
Where("last_agent_report is not NULL").
|
||||
Scan(ctx, &cloudIntegrationId)
|
||||
|
||||
if err != nil {
|
||||
return nil, model.InternalError(fmt.Errorf(
|
||||
"couldn't query cloud integration id: %w", err,
|
||||
))
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
return nil, errors.WrapNotFoundf(
|
||||
err,
|
||||
CodeCloudIntegrationAccountNotFound,
|
||||
"couldn't find active cloud integration account",
|
||||
)
|
||||
}
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query cloud integration id")
|
||||
}
|
||||
|
||||
serviceConfig := types.CloudIntegrationService{
|
||||
serviceConfig := integrationstypes.CloudIntegrationService{
|
||||
Identifiable: types.Identifiable{ID: valuer.GenerateUUID()},
|
||||
TimeAuditable: types.TimeAuditable{
|
||||
CreatedAt: time.Now(),
|
||||
@@ -126,21 +123,18 @@ func (r *serviceConfigSQLRepository) upsert(
|
||||
On("conflict(cloud_integration_id, type) do update set config=excluded.config, updated_at=excluded.updated_at").
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return nil, model.InternalError(fmt.Errorf(
|
||||
"could not upsert cloud service config: %w", err,
|
||||
))
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't upsert cloud service config")
|
||||
}
|
||||
|
||||
return &serviceConfig.Config, nil
|
||||
|
||||
return config, nil
|
||||
}
|
||||
|
||||
func (r *serviceConfigSQLRepository) getAllForAccount(
|
||||
func (r *serviceConfigSQLRepository) GetAllForAccount(
|
||||
ctx context.Context,
|
||||
orgID string,
|
||||
cloudAccountId string,
|
||||
) (map[string]*types.CloudServiceConfig, *model.ApiError) {
|
||||
serviceConfigs := []types.CloudIntegrationService{}
|
||||
) (map[string][]byte, error) {
|
||||
var serviceConfigs []integrationstypes.CloudIntegrationService
|
||||
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&serviceConfigs).
|
||||
@@ -149,15 +143,13 @@ func (r *serviceConfigSQLRepository) getAllForAccount(
|
||||
Where("ci.org_id = ?", orgID).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
return nil, model.InternalError(fmt.Errorf(
|
||||
"could not query service configs from db: %w", err,
|
||||
))
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query service configs from db")
|
||||
}
|
||||
|
||||
result := map[string]*types.CloudServiceConfig{}
|
||||
result := make(map[string][]byte)
|
||||
|
||||
for _, r := range serviceConfigs {
|
||||
result[r.Type] = &r.Config
|
||||
result[r.Type] = r.Config
|
||||
}
|
||||
|
||||
return result, nil
|
||||
@@ -6,11 +6,7 @@ import (
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/flagger"
|
||||
"github.com/SigNoz/signoz/pkg/modules/thirdpartyapi"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
"log/slog"
|
||||
|
||||
"io"
|
||||
"math"
|
||||
@@ -25,14 +21,18 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/flagger"
|
||||
"github.com/SigNoz/signoz/pkg/http/middleware"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
|
||||
"github.com/SigNoz/signoz/pkg/modules/thirdpartyapi"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/metricsexplorer"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/prometheus/prometheus/promql"
|
||||
|
||||
@@ -44,7 +44,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/contextlinks"
|
||||
traceFunnelsModule "github.com/SigNoz/signoz/pkg/modules/tracefunnel"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/inframetrics"
|
||||
queues2 "github.com/SigNoz/signoz/pkg/query-service/app/integrations/messagingQueues/queues"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/logs"
|
||||
@@ -113,7 +112,7 @@ type APIHandler struct {
|
||||
|
||||
IntegrationsController *integrations.Controller
|
||||
|
||||
CloudIntegrationsController *cloudintegrations.Controller
|
||||
cloudIntegrationsRegistry map[integrationstypes.CloudProviderType]integrationstypes.CloudProvider
|
||||
|
||||
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
|
||||
|
||||
@@ -163,7 +162,7 @@ type APIHandlerOpts struct {
|
||||
IntegrationsController *integrations.Controller
|
||||
|
||||
// Cloud Provider Integrations
|
||||
CloudIntegrationsController *cloudintegrations.Controller
|
||||
CloudIntegrationsRegistry map[integrationstypes.CloudProviderType]integrationstypes.CloudProvider
|
||||
|
||||
// Log parsing pipelines
|
||||
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
|
||||
@@ -183,7 +182,7 @@ type APIHandlerOpts struct {
|
||||
}
|
||||
|
||||
// NewAPIHandler returns an APIHandler
|
||||
func NewAPIHandler(opts APIHandlerOpts, config signoz.Config) (*APIHandler, error) {
|
||||
func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
|
||||
querierOpts := querier.QuerierOptions{
|
||||
Reader: opts.Reader,
|
||||
Cache: opts.Signoz.Cache,
|
||||
@@ -220,7 +219,7 @@ func NewAPIHandler(opts APIHandlerOpts, config signoz.Config) (*APIHandler, erro
|
||||
temporalityMap: make(map[string]map[v3.Temporality]bool),
|
||||
ruleManager: opts.RuleManager,
|
||||
IntegrationsController: opts.IntegrationsController,
|
||||
CloudIntegrationsController: opts.CloudIntegrationsController,
|
||||
cloudIntegrationsRegistry: opts.CloudIntegrationsRegistry,
|
||||
LogsParsingPipelineController: opts.LogsParsingPipelineController,
|
||||
querier: querier,
|
||||
querierV2: querierv2,
|
||||
@@ -270,11 +269,6 @@ func NewAPIHandler(opts APIHandlerOpts, config signoz.Config) (*APIHandler, erro
|
||||
}
|
||||
}
|
||||
|
||||
// If the root user is enabled, the setup is complete
|
||||
if config.User.Root.Enabled {
|
||||
aH.SetupCompleted = true
|
||||
}
|
||||
|
||||
aH.Upgrader = &websocket.Upgrader{
|
||||
CheckOrigin: func(r *http.Request) bool {
|
||||
return true
|
||||
@@ -1222,13 +1216,22 @@ func (aH *APIHandler) Get(rw http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
|
||||
dashboard := new(dashboardtypes.Dashboard)
|
||||
if aH.CloudIntegrationsController.IsCloudIntegrationDashboardUuid(id) {
|
||||
cloudIntegrationDashboard, apiErr := aH.CloudIntegrationsController.GetDashboardById(ctx, orgID, id)
|
||||
if apiErr != nil {
|
||||
render.Error(rw, errorsV2.Wrapf(apiErr, errorsV2.TypeInternal, errorsV2.CodeInternal, "failed to get dashboard"))
|
||||
if integrationstypes.IsCloudIntegrationDashboardUuid(id) {
|
||||
cloudProvider, err := integrationstypes.GetCloudProviderFromDashboardID(id)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
dashboard = cloudIntegrationDashboard
|
||||
|
||||
integrationDashboard, err := aH.cloudIntegrationsRegistry[cloudProvider].GetDashboard(ctx, &integrationstypes.GettableDashboard{
|
||||
ID: id,
|
||||
OrgID: orgID,
|
||||
})
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
dashboard = integrationDashboard
|
||||
} else if aH.IntegrationsController.IsInstalledIntegrationDashboardID(id) {
|
||||
integrationDashboard, apiErr := aH.IntegrationsController.GetInstalledIntegrationDashboardById(ctx, orgID, id)
|
||||
if apiErr != nil {
|
||||
@@ -1292,11 +1295,13 @@ func (aH *APIHandler) List(rw http.ResponseWriter, r *http.Request) {
|
||||
dashboards = append(dashboards, installedIntegrationDashboards...)
|
||||
}
|
||||
|
||||
cloudIntegrationDashboards, apiErr := aH.CloudIntegrationsController.AvailableDashboards(ctx, orgID)
|
||||
if apiErr != nil {
|
||||
zap.L().Error("failed to get dashboards for cloud integrations", zap.Error(apiErr))
|
||||
} else {
|
||||
dashboards = append(dashboards, cloudIntegrationDashboards...)
|
||||
for _, provider := range aH.cloudIntegrationsRegistry {
|
||||
cloudIntegrationDashboards, err := provider.GetAvailableDashboards(ctx, orgID)
|
||||
if err != nil {
|
||||
zap.L().Error("failed to get dashboards for cloud integrations", zap.Error(apiErr))
|
||||
} else {
|
||||
dashboards = append(dashboards, cloudIntegrationDashboards...)
|
||||
}
|
||||
}
|
||||
|
||||
gettableDashboards, err := dashboardtypes.NewGettableDashboardsFromDashboards(dashboards)
|
||||
@@ -3272,15 +3277,15 @@ func (aH *APIHandler) GetIntegrationConnectionStatus(w http.ResponseWriter, r *h
|
||||
lookbackSeconds = 15 * 60
|
||||
}
|
||||
|
||||
connectionStatus, apiErr := aH.calculateConnectionStatus(
|
||||
connectionStatus, err := aH.calculateConnectionStatus(
|
||||
r.Context(), orgID, connectionTests, lookbackSeconds,
|
||||
)
|
||||
if apiErr != nil {
|
||||
RespondError(w, apiErr, "Failed to calculate integration connection status")
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
aH.Respond(w, connectionStatus)
|
||||
render.Success(w, http.StatusOK, connectionStatus)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) calculateConnectionStatus(
|
||||
@@ -3288,10 +3293,11 @@ func (aH *APIHandler) calculateConnectionStatus(
|
||||
orgID valuer.UUID,
|
||||
connectionTests *integrations.IntegrationConnectionTests,
|
||||
lookbackSeconds int64,
|
||||
) (*integrations.IntegrationConnectionStatus, *model.ApiError) {
|
||||
) (*integrations.IntegrationConnectionStatus, error) {
|
||||
// Calculate connection status for signals in parallel
|
||||
|
||||
result := &integrations.IntegrationConnectionStatus{}
|
||||
// TODO: migrate to errors package
|
||||
errors := []*model.ApiError{}
|
||||
var resultLock sync.Mutex
|
||||
|
||||
@@ -3489,12 +3495,14 @@ func (aH *APIHandler) UninstallIntegration(w http.ResponseWriter, r *http.Reques
|
||||
aH.Respond(w, map[string]interface{}{})
|
||||
}
|
||||
|
||||
// cloud provider integrations
|
||||
// RegisterCloudIntegrationsRoutes registers routes for cloud provider integrations
|
||||
func (aH *APIHandler) RegisterCloudIntegrationsRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
subRouter := router.PathPrefix("/api/v1/cloud-integrations").Subrouter()
|
||||
|
||||
subRouter.Use(middleware.NewRecovery(aH.Signoz.Instrumentation.Logger()).Wrap)
|
||||
|
||||
subRouter.HandleFunc(
|
||||
"/{cloudProvider}/accounts/generate-connection-url", am.EditAccess(aH.CloudIntegrationsGenerateConnectionUrl),
|
||||
"/{cloudProvider}/accounts/generate-connection-url", am.EditAccess(aH.CloudIntegrationsGenerateConnectionArtifact),
|
||||
).Methods(http.MethodPost)
|
||||
|
||||
subRouter.HandleFunc(
|
||||
@@ -3528,170 +3536,199 @@ func (aH *APIHandler) RegisterCloudIntegrationsRoutes(router *mux.Router, am *mi
|
||||
subRouter.HandleFunc(
|
||||
"/{cloudProvider}/services/{serviceId}/config", am.EditAccess(aH.CloudIntegrationsUpdateServiceConfig),
|
||||
).Methods(http.MethodPost)
|
||||
|
||||
}
|
||||
|
||||
func (aH *APIHandler) CloudIntegrationsListConnectedAccounts(
|
||||
w http.ResponseWriter, r *http.Request,
|
||||
) {
|
||||
cloudProvider := mux.Vars(r)["cloudProvider"]
|
||||
|
||||
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
|
||||
if errv2 != nil {
|
||||
render.Error(w, errv2)
|
||||
return
|
||||
}
|
||||
|
||||
resp, apiErr := aH.CloudIntegrationsController.ListConnectedAccounts(
|
||||
r.Context(), claims.OrgID, cloudProvider,
|
||||
)
|
||||
|
||||
if apiErr != nil {
|
||||
RespondError(w, apiErr, nil)
|
||||
return
|
||||
}
|
||||
aH.Respond(w, resp)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) CloudIntegrationsGenerateConnectionUrl(
|
||||
w http.ResponseWriter, r *http.Request,
|
||||
) {
|
||||
cloudProvider := mux.Vars(r)["cloudProvider"]
|
||||
|
||||
req := cloudintegrations.GenerateConnectionUrlRequest{}
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
return
|
||||
}
|
||||
|
||||
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
|
||||
if errv2 != nil {
|
||||
render.Error(w, errv2)
|
||||
return
|
||||
}
|
||||
|
||||
result, apiErr := aH.CloudIntegrationsController.GenerateConnectionUrl(
|
||||
r.Context(), claims.OrgID, cloudProvider, req,
|
||||
)
|
||||
|
||||
if apiErr != nil {
|
||||
RespondError(w, apiErr, nil)
|
||||
return
|
||||
}
|
||||
|
||||
aH.Respond(w, result)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) CloudIntegrationsGetAccountStatus(
|
||||
w http.ResponseWriter, r *http.Request,
|
||||
) {
|
||||
cloudProvider := mux.Vars(r)["cloudProvider"]
|
||||
accountId := mux.Vars(r)["accountId"]
|
||||
|
||||
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
|
||||
if errv2 != nil {
|
||||
render.Error(w, errv2)
|
||||
return
|
||||
}
|
||||
|
||||
resp, apiErr := aH.CloudIntegrationsController.GetAccountStatus(
|
||||
r.Context(), claims.OrgID, cloudProvider, accountId,
|
||||
)
|
||||
|
||||
if apiErr != nil {
|
||||
RespondError(w, apiErr, nil)
|
||||
return
|
||||
}
|
||||
aH.Respond(w, resp)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) CloudIntegrationsAgentCheckIn(
|
||||
w http.ResponseWriter, r *http.Request,
|
||||
) {
|
||||
cloudProvider := mux.Vars(r)["cloudProvider"]
|
||||
|
||||
req := cloudintegrations.AgentCheckInRequest{}
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
return
|
||||
}
|
||||
|
||||
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
|
||||
if errv2 != nil {
|
||||
render.Error(w, errv2)
|
||||
return
|
||||
}
|
||||
|
||||
result, err := aH.CloudIntegrationsController.CheckInAsAgent(
|
||||
r.Context(), claims.OrgID, cloudProvider, req,
|
||||
)
|
||||
func (aH *APIHandler) CloudIntegrationsGenerateConnectionArtifact(w http.ResponseWriter, r *http.Request) {
|
||||
cloudProviderString := mux.Vars(r)["cloudProvider"]
|
||||
|
||||
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
aH.Respond(w, result)
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
reqBody, err := io.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
render.Error(w, errors.WrapInternalf(err, errors.CodeInternal, "failed to read request body"))
|
||||
return
|
||||
}
|
||||
|
||||
resp, err := aH.cloudIntegrationsRegistry[cloudProvider].GenerateConnectionArtifact(r.Context(), &integrationstypes.PostableConnectionArtifact{
|
||||
OrgID: claims.OrgID,
|
||||
Data: reqBody,
|
||||
})
|
||||
if err != nil {
|
||||
aH.Signoz.Instrumentation.Logger().ErrorContext(r.Context(),
|
||||
"failed to generate connection artifact for cloud integration",
|
||||
slog.String("cloudProvider", cloudProviderString),
|
||||
slog.String("orgID", claims.OrgID),
|
||||
)
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(w, http.StatusOK, resp)
|
||||
}
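The handler above forwards the request body as opaque bytes and leaves decoding to the provider. A minimal sketch of the provider-side counterpart, assuming a hypothetical awsProvider type that lives alongside the AWS provider implementation (only the integrationstypes types and methods are from this change):

// Hypothetical provider-side counterpart: decode the opaque Data bytes into the
// AWS-specific payload before building the connection URL for the org.
func (p *awsProvider) GenerateConnectionArtifact(ctx context.Context, req *integrationstypes.PostableConnectionArtifact) (any, error) {
	payload := new(integrationstypes.PostableAWSConnectionUrl)
	if err := payload.Unmarshal(req.Data); err != nil {
		return nil, err
	}
	// Use payload.AccountConfig and payload.AgentConfig together with req.OrgID
	// to assemble the CloudFormation connection URL (details omitted here).
	return &integrationstypes.GettableAWSConnectionUrl{}, nil
}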
|
||||
|
||||
func (aH *APIHandler) CloudIntegrationsUpdateAccountConfig(
|
||||
w http.ResponseWriter, r *http.Request,
|
||||
) {
|
||||
cloudProvider := mux.Vars(r)["cloudProvider"]
|
||||
func (aH *APIHandler) CloudIntegrationsListConnectedAccounts(w http.ResponseWriter, r *http.Request) {
|
||||
cloudProviderString := mux.Vars(r)["cloudProvider"]
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
resp, err := aH.cloudIntegrationsRegistry[cloudProvider].ListConnectedAccounts(r.Context(), claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(w, http.StatusOK, resp)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) CloudIntegrationsGetAccountStatus(w http.ResponseWriter, r *http.Request) {
|
||||
cloudProviderString := mux.Vars(r)["cloudProvider"]
|
||||
|
||||
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
accountId := mux.Vars(r)["accountId"]
|
||||
|
||||
req := cloudintegrations.UpdateAccountConfigRequest{}
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
|
||||
if errv2 != nil {
|
||||
render.Error(w, errv2)
|
||||
resp, err := aH.cloudIntegrationsRegistry[cloudProvider].GetAccountStatus(r.Context(), claims.OrgID, accountId)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
result, apiErr := aH.CloudIntegrationsController.UpdateAccountConfig(
|
||||
r.Context(), claims.OrgID, cloudProvider, accountId, req,
|
||||
)
|
||||
|
||||
if apiErr != nil {
|
||||
RespondError(w, apiErr, nil)
|
||||
return
|
||||
}
|
||||
|
||||
aH.Respond(w, result)
|
||||
render.Success(w, http.StatusOK, resp)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) CloudIntegrationsDisconnectAccount(
|
||||
w http.ResponseWriter, r *http.Request,
|
||||
) {
|
||||
cloudProvider := mux.Vars(r)["cloudProvider"]
|
||||
func (aH *APIHandler) CloudIntegrationsAgentCheckIn(w http.ResponseWriter, r *http.Request) {
|
||||
cloudProviderString := mux.Vars(r)["cloudProvider"]
|
||||
|
||||
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
req := new(integrationstypes.PostableAgentCheckInPayload)
|
||||
if err = json.NewDecoder(r.Body).Decode(req); err != nil {
|
||||
render.Error(w, errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid request body"))
|
||||
return
|
||||
}
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
req.OrgID = claims.OrgID
|
||||
|
||||
resp, err := aH.cloudIntegrationsRegistry[cloudProvider].AgentCheckIn(r.Context(), req)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(w, http.StatusOK, resp)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) CloudIntegrationsUpdateAccountConfig(w http.ResponseWriter, r *http.Request) {
|
||||
cloudProviderString := mux.Vars(r)["cloudProvider"]
|
||||
|
||||
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
accountId := mux.Vars(r)["accountId"]
|
||||
|
||||
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
|
||||
if errv2 != nil {
|
||||
render.Error(w, errv2)
|
||||
reqBody, err := io.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
render.Error(w, errors.WrapInternalf(err, errors.CodeInternal, "failed to read request body"))
|
||||
return
|
||||
}
|
||||
|
||||
result, apiErr := aH.CloudIntegrationsController.DisconnectAccount(
|
||||
r.Context(), claims.OrgID, cloudProvider, accountId,
|
||||
)
|
||||
|
||||
if apiErr != nil {
|
||||
RespondError(w, apiErr, nil)
|
||||
resp, err := aH.cloudIntegrationsRegistry[cloudProvider].UpdateAccountConfig(r.Context(), &integrationstypes.PatchableAccountConfig{
|
||||
OrgID: claims.OrgID,
|
||||
AccountId: accountId,
|
||||
Data: reqBody,
|
||||
})
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
aH.Respond(w, result)
|
||||
render.Success(w, http.StatusOK, resp)
|
||||
return
|
||||
}
|
||||
|
||||
func (aH *APIHandler) CloudIntegrationsListServices(
|
||||
w http.ResponseWriter, r *http.Request,
|
||||
) {
|
||||
cloudProvider := mux.Vars(r)["cloudProvider"]
|
||||
func (aH *APIHandler) CloudIntegrationsDisconnectAccount(w http.ResponseWriter, r *http.Request) {
|
||||
cloudProviderString := mux.Vars(r)["cloudProvider"]
|
||||
|
||||
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
accountId := mux.Vars(r)["accountId"]
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
result, err := aH.cloudIntegrationsRegistry[cloudProvider].DisconnectAccount(r.Context(), claims.OrgID, accountId)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(w, http.StatusOK, result)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) CloudIntegrationsListServices(w http.ResponseWriter, r *http.Request) {
|
||||
cloudProviderString := mux.Vars(r)["cloudProvider"]
|
||||
|
||||
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
var cloudAccountId *string
|
||||
|
||||
@@ -3700,26 +3737,22 @@ func (aH *APIHandler) CloudIntegrationsListServices(
|
||||
cloudAccountId = &cloudAccountIdQP
|
||||
}
|
||||
|
||||
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
|
||||
if errv2 != nil {
|
||||
render.Error(w, errv2)
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
resp, apiErr := aH.CloudIntegrationsController.ListServices(
|
||||
r.Context(), claims.OrgID, cloudProvider, cloudAccountId,
|
||||
)
|
||||
|
||||
if apiErr != nil {
|
||||
RespondError(w, apiErr, nil)
|
||||
resp, err := aH.cloudIntegrationsRegistry[cloudProvider].ListServices(r.Context(), claims.OrgID, cloudAccountId)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
aH.Respond(w, resp)
|
||||
|
||||
render.Success(w, http.StatusOK, resp)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) CloudIntegrationsGetServiceDetails(
|
||||
w http.ResponseWriter, r *http.Request,
|
||||
) {
|
||||
func (aH *APIHandler) CloudIntegrationsGetServiceDetails(w http.ResponseWriter, r *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
@@ -3731,7 +3764,14 @@ func (aH *APIHandler) CloudIntegrationsGetServiceDetails(
|
||||
return
|
||||
}
|
||||
|
||||
cloudProvider := mux.Vars(r)["cloudProvider"]
|
||||
cloudProviderString := mux.Vars(r)["cloudProvider"]
|
||||
|
||||
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
serviceId := mux.Vars(r)["serviceId"]
|
||||
|
||||
var cloudAccountId *string
|
||||
@@ -3741,270 +3781,59 @@ func (aH *APIHandler) CloudIntegrationsGetServiceDetails(
|
||||
cloudAccountId = &cloudAccountIdQP
|
||||
}
|
||||
|
||||
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
|
||||
if errv2 != nil {
|
||||
render.Error(w, errv2)
|
||||
return
|
||||
}
|
||||
|
||||
resp, err := aH.CloudIntegrationsController.GetServiceDetails(
|
||||
r.Context(), claims.OrgID, cloudProvider, serviceId, cloudAccountId,
|
||||
)
|
||||
resp, err := aH.cloudIntegrationsRegistry[cloudProvider].GetServiceDetails(r.Context(), &integrationstypes.GetServiceDetailsReq{
|
||||
OrgID: orgID.String(),
|
||||
ServiceId: serviceId,
|
||||
CloudAccountID: cloudAccountId,
|
||||
})
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
// Add connection status for the 2 signals.
|
||||
if cloudAccountId != nil {
|
||||
connStatus, apiErr := aH.calculateCloudIntegrationServiceConnectionStatus(
|
||||
r.Context(), orgID, cloudProvider, *cloudAccountId, resp,
|
||||
)
|
||||
if apiErr != nil {
|
||||
RespondError(w, apiErr, nil)
|
||||
return
|
||||
}
|
||||
resp.ConnectionStatus = connStatus
|
||||
}
|
||||
|
||||
aH.Respond(w, resp)
|
||||
render.Success(w, http.StatusOK, resp)
|
||||
return
|
||||
}
|
||||
|
||||
func (aH *APIHandler) calculateCloudIntegrationServiceConnectionStatus(
|
||||
ctx context.Context,
|
||||
orgID valuer.UUID,
|
||||
cloudProvider string,
|
||||
cloudAccountId string,
|
||||
svcDetails *cloudintegrations.ServiceDetails,
|
||||
) (*cloudintegrations.ServiceConnectionStatus, *model.ApiError) {
|
||||
if cloudProvider != "aws" {
|
||||
// TODO(Raj): Make connection check generic for all providers in a follow up change
|
||||
return nil, model.BadRequest(
|
||||
fmt.Errorf("unsupported cloud provider: %s", cloudProvider),
|
||||
)
|
||||
}
|
||||
func (aH *APIHandler) CloudIntegrationsUpdateServiceConfig(w http.ResponseWriter, r *http.Request) {
|
||||
cloudProviderString := mux.Vars(r)["cloudProvider"]
|
||||
|
||||
telemetryCollectionStrategy := svcDetails.Strategy
|
||||
if telemetryCollectionStrategy == nil {
|
||||
return nil, model.InternalError(fmt.Errorf(
|
||||
"service doesn't have telemetry collection strategy: %s", svcDetails.Id,
|
||||
))
|
||||
}
|
||||
|
||||
result := &cloudintegrations.ServiceConnectionStatus{}
|
||||
errors := []*model.ApiError{}
|
||||
var resultLock sync.Mutex
|
||||
|
||||
var wg sync.WaitGroup
|
||||
|
||||
// Calculate metrics connection status
|
||||
if telemetryCollectionStrategy.AWSMetrics != nil {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
|
||||
metricsConnStatus, apiErr := aH.calculateAWSIntegrationSvcMetricsConnectionStatus(
|
||||
ctx, cloudAccountId, telemetryCollectionStrategy.AWSMetrics, svcDetails.DataCollected.Metrics,
|
||||
)
|
||||
|
||||
resultLock.Lock()
|
||||
defer resultLock.Unlock()
|
||||
|
||||
if apiErr != nil {
|
||||
errors = append(errors, apiErr)
|
||||
} else {
|
||||
result.Metrics = metricsConnStatus
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
// Calculate logs connection status
|
||||
if telemetryCollectionStrategy.AWSLogs != nil {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
|
||||
logsConnStatus, apiErr := aH.calculateAWSIntegrationSvcLogsConnectionStatus(
|
||||
ctx, orgID, cloudAccountId, telemetryCollectionStrategy.AWSLogs,
|
||||
)
|
||||
|
||||
resultLock.Lock()
|
||||
defer resultLock.Unlock()
|
||||
|
||||
if apiErr != nil {
|
||||
errors = append(errors, apiErr)
|
||||
} else {
|
||||
result.Logs = logsConnStatus
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
wg.Wait()
|
||||
|
||||
if len(errors) > 0 {
|
||||
return nil, errors[0]
|
||||
}
|
||||
|
||||
return result, nil
|
||||
|
||||
}
|
||||
func (aH *APIHandler) calculateAWSIntegrationSvcMetricsConnectionStatus(
|
||||
ctx context.Context,
|
||||
cloudAccountId string,
|
||||
strategy *services.AWSMetricsStrategy,
|
||||
metricsCollectedBySvc []services.CollectedMetric,
|
||||
) (*cloudintegrations.SignalConnectionStatus, *model.ApiError) {
|
||||
if strategy == nil || len(strategy.StreamFilters) < 1 {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
expectedLabelValues := map[string]string{
|
||||
"cloud_provider": "aws",
|
||||
"cloud_account_id": cloudAccountId,
|
||||
}
|
||||
|
||||
metricsNamespace := strategy.StreamFilters[0].Namespace
|
||||
metricsNamespaceParts := strings.Split(metricsNamespace, "/")
|
||||
|
||||
if len(metricsNamespaceParts) >= 2 {
|
||||
expectedLabelValues["service_namespace"] = metricsNamespaceParts[0]
|
||||
expectedLabelValues["service_name"] = metricsNamespaceParts[1]
|
||||
} else {
|
||||
// metrics for single word namespaces like "CWAgent" do not
|
||||
// have the service_namespace label populated
|
||||
expectedLabelValues["service_name"] = metricsNamespaceParts[0]
|
||||
}
|
||||
|
||||
metricNamesCollectedBySvc := []string{}
|
||||
for _, cm := range metricsCollectedBySvc {
|
||||
metricNamesCollectedBySvc = append(metricNamesCollectedBySvc, cm.Name)
|
||||
}
|
||||
|
||||
statusForLastReceivedMetric, apiErr := aH.reader.GetLatestReceivedMetric(
|
||||
ctx, metricNamesCollectedBySvc, expectedLabelValues,
|
||||
)
|
||||
if apiErr != nil {
|
||||
return nil, apiErr
|
||||
}
|
||||
|
||||
if statusForLastReceivedMetric != nil {
|
||||
return &cloudintegrations.SignalConnectionStatus{
|
||||
LastReceivedTsMillis: statusForLastReceivedMetric.LastReceivedTsMillis,
|
||||
LastReceivedFrom: "signoz-aws-integration",
|
||||
}, nil
|
||||
}
|
||||
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (aH *APIHandler) calculateAWSIntegrationSvcLogsConnectionStatus(
|
||||
ctx context.Context,
|
||||
orgID valuer.UUID,
|
||||
cloudAccountId string,
|
||||
strategy *services.AWSLogsStrategy,
|
||||
) (*cloudintegrations.SignalConnectionStatus, *model.ApiError) {
|
||||
if strategy == nil || len(strategy.Subscriptions) < 1 {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
logGroupNamePrefix := strategy.Subscriptions[0].LogGroupNamePrefix
|
||||
if len(logGroupNamePrefix) < 1 {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
logsConnTestFilter := &v3.FilterSet{
|
||||
Operator: "AND",
|
||||
Items: []v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "cloud.account.id",
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeResource,
|
||||
},
|
||||
Operator: "=",
|
||||
Value: cloudAccountId,
|
||||
},
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "aws.cloudwatch.log_group_name",
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeResource,
|
||||
},
|
||||
Operator: "like",
|
||||
Value: logGroupNamePrefix + "%",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// TODO(Raj): Receive this as a param from UI in the future.
|
||||
lookbackSeconds := int64(30 * 60)
|
||||
|
||||
qrParams := &v3.QueryRangeParamsV3{
|
||||
Start: time.Now().UnixMilli() - (lookbackSeconds * 1000),
|
||||
End: time.Now().UnixMilli(),
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
PanelType: v3.PanelTypeList,
|
||||
QueryType: v3.QueryTypeBuilder,
|
||||
BuilderQueries: map[string]*v3.BuilderQuery{
|
||||
"A": {
|
||||
PageSize: 1,
|
||||
Filters: logsConnTestFilter,
|
||||
QueryName: "A",
|
||||
DataSource: v3.DataSourceLogs,
|
||||
Expression: "A",
|
||||
AggregateOperator: v3.AggregateOperatorNoOp,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
queryRes, _, err := aH.querier.QueryRange(
|
||||
ctx, orgID, qrParams,
|
||||
)
|
||||
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
|
||||
if err != nil {
|
||||
return nil, model.InternalError(fmt.Errorf(
|
||||
"could not query for integration connection status: %w", err,
|
||||
))
|
||||
}
|
||||
if len(queryRes) > 0 && queryRes[0].List != nil && len(queryRes[0].List) > 0 {
|
||||
lastLog := queryRes[0].List[0]
|
||||
|
||||
return &cloudintegrations.SignalConnectionStatus{
|
||||
LastReceivedTsMillis: lastLog.Timestamp.UnixMilli(),
|
||||
LastReceivedFrom: "signoz-aws-integration",
|
||||
}, nil
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (aH *APIHandler) CloudIntegrationsUpdateServiceConfig(
|
||||
w http.ResponseWriter, r *http.Request,
|
||||
) {
|
||||
cloudProvider := mux.Vars(r)["cloudProvider"]
|
||||
serviceId := mux.Vars(r)["serviceId"]
|
||||
|
||||
req := cloudintegrations.UpdateServiceConfigRequest{}
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
return
|
||||
}
|
||||
|
||||
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
|
||||
if errv2 != nil {
|
||||
render.Error(w, errv2)
|
||||
return
|
||||
}
|
||||
|
||||
result, err := aH.CloudIntegrationsController.UpdateServiceConfig(
|
||||
r.Context(), claims.OrgID, cloudProvider, serviceId, &req,
|
||||
)
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
aH.Respond(w, result)
|
||||
reqBody, err := io.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
render.Error(w, errors.WrapInternalf(err,
|
||||
errors.CodeInternal,
|
||||
"failed to read update service config request body",
|
||||
))
|
||||
return
|
||||
}
|
||||
|
||||
result, err := aH.cloudIntegrationsRegistry[cloudProvider].UpdateServiceConfig(
|
||||
r.Context(), &integrationstypes.PatchableServiceConfig{
|
||||
OrgID: claims.OrgID,
|
||||
ServiceId: serviceId,
|
||||
Config: reqBody,
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(w, http.StatusOK, result)
|
||||
}
|
||||
|
||||
// logs
|
||||
|
||||
@@ -11,6 +11,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
@@ -107,7 +108,7 @@ type IntegrationsListItem struct {
|
||||
|
||||
type Integration struct {
|
||||
IntegrationDetails
|
||||
Installation *types.InstalledIntegration `json:"installation"`
|
||||
Installation *integrationstypes.InstalledIntegration `json:"installation"`
|
||||
}
|
||||
|
||||
type Manager struct {
|
||||
@@ -223,7 +224,7 @@ func (m *Manager) InstallIntegration(
|
||||
ctx context.Context,
|
||||
orgId string,
|
||||
integrationId string,
|
||||
config types.InstalledIntegrationConfig,
|
||||
config integrationstypes.InstalledIntegrationConfig,
|
||||
) (*IntegrationsListItem, *model.ApiError) {
|
||||
integrationDetails, apiErr := m.getIntegrationDetails(ctx, integrationId)
|
||||
if apiErr != nil {
|
||||
@@ -429,7 +430,7 @@ func (m *Manager) getInstalledIntegration(
|
||||
ctx context.Context,
|
||||
orgId string,
|
||||
integrationId string,
|
||||
) (*types.InstalledIntegration, *model.ApiError) {
|
||||
) (*integrationstypes.InstalledIntegration, *model.ApiError) {
|
||||
iis, apiErr := m.installedIntegrationsRepo.get(
|
||||
ctx, orgId, []string{integrationId},
|
||||
)
|
||||
@@ -457,7 +458,7 @@ func (m *Manager) getInstalledIntegrations(
|
||||
return nil, apiErr
|
||||
}
|
||||
|
||||
installedTypes := utils.MapSlice(installations, func(i types.InstalledIntegration) string {
|
||||
installedTypes := utils.MapSlice(installations, func(i integrationstypes.InstalledIntegration) string {
|
||||
return i.Type
|
||||
})
|
||||
integrationDetails, apiErr := m.availableIntegrationsRepo.get(ctx, installedTypes)
|
||||
|
||||
@@ -4,22 +4,22 @@ import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
|
||||
)
|
||||
|
||||
type InstalledIntegrationsRepo interface {
|
||||
list(ctx context.Context, orgId string) ([]types.InstalledIntegration, *model.ApiError)
|
||||
list(ctx context.Context, orgId string) ([]integrationstypes.InstalledIntegration, *model.ApiError)
|
||||
|
||||
get(
|
||||
ctx context.Context, orgId string, integrationTypes []string,
|
||||
) (map[string]types.InstalledIntegration, *model.ApiError)
|
||||
) (map[string]integrationstypes.InstalledIntegration, *model.ApiError)
|
||||
|
||||
upsert(
|
||||
ctx context.Context,
|
||||
orgId string,
|
||||
integrationType string,
|
||||
config types.InstalledIntegrationConfig,
|
||||
) (*types.InstalledIntegration, *model.ApiError)
|
||||
config integrationstypes.InstalledIntegrationConfig,
|
||||
) (*integrationstypes.InstalledIntegration, *model.ApiError)
|
||||
|
||||
delete(ctx context.Context, orgId string, integrationType string) *model.ApiError
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
@@ -26,8 +27,8 @@ func NewInstalledIntegrationsSqliteRepo(store sqlstore.SQLStore) (
|
||||
func (r *InstalledIntegrationsSqliteRepo) list(
|
||||
ctx context.Context,
|
||||
orgId string,
|
||||
) ([]types.InstalledIntegration, *model.ApiError) {
|
||||
integrations := []types.InstalledIntegration{}
|
||||
) ([]integrationstypes.InstalledIntegration, *model.ApiError) {
|
||||
integrations := []integrationstypes.InstalledIntegration{}
|
||||
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&integrations).
|
||||
@@ -44,8 +45,8 @@ func (r *InstalledIntegrationsSqliteRepo) list(
|
||||
|
||||
func (r *InstalledIntegrationsSqliteRepo) get(
|
||||
ctx context.Context, orgId string, integrationTypes []string,
|
||||
) (map[string]types.InstalledIntegration, *model.ApiError) {
|
||||
integrations := []types.InstalledIntegration{}
|
||||
) (map[string]integrationstypes.InstalledIntegration, *model.ApiError) {
|
||||
integrations := []integrationstypes.InstalledIntegration{}
|
||||
|
||||
typeValues := []interface{}{}
|
||||
for _, integrationType := range integrationTypes {
|
||||
@@ -62,7 +63,7 @@ func (r *InstalledIntegrationsSqliteRepo) get(
|
||||
))
|
||||
}
|
||||
|
||||
result := map[string]types.InstalledIntegration{}
|
||||
result := map[string]integrationstypes.InstalledIntegration{}
|
||||
for _, ii := range integrations {
|
||||
result[ii.Type] = ii
|
||||
}
|
||||
@@ -74,10 +75,10 @@ func (r *InstalledIntegrationsSqliteRepo) upsert(
|
||||
ctx context.Context,
|
||||
orgId string,
|
||||
integrationType string,
|
||||
config types.InstalledIntegrationConfig,
|
||||
) (*types.InstalledIntegration, *model.ApiError) {
|
||||
config integrationstypes.InstalledIntegrationConfig,
|
||||
) (*integrationstypes.InstalledIntegration, *model.ApiError) {
|
||||
|
||||
integration := types.InstalledIntegration{
|
||||
integration := integrationstypes.InstalledIntegration{
|
||||
Identifiable: types.Identifiable{
|
||||
ID: valuer.GenerateUUID(),
|
||||
},
|
||||
@@ -114,7 +115,7 @@ func (r *InstalledIntegrationsSqliteRepo) delete(
|
||||
ctx context.Context, orgId string, integrationType string,
|
||||
) *model.ApiError {
|
||||
_, dbErr := r.store.BunDB().NewDelete().
|
||||
Model(&types.InstalledIntegration{}).
|
||||
Model(&integrationstypes.InstalledIntegration{}).
|
||||
Where("type = ?", integrationType).
|
||||
Where("org_id = ?", orgId).
|
||||
Exec(ctx)
|
||||
|
||||
@@ -71,10 +71,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
cloudIntegrationsController, err := cloudintegrations.NewController(signoz.SQLStore)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
cloudIntegrationsRegistry := cloudintegrations.NewCloudProviderRegistry(signoz.Instrumentation.Logger(), signoz.SQLStore, signoz.Querier)
|
||||
|
||||
cacheForTraceDetail, err := memorycache.New(context.TODO(), signoz.Instrumentation.ToProviderSettings(), cache.Config{
|
||||
Provider: "memory",
|
||||
@@ -127,7 +124,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
|
||||
Reader: reader,
|
||||
RuleManager: rm,
|
||||
IntegrationsController: integrationsController,
|
||||
CloudIntegrationsController: cloudIntegrationsController,
|
||||
CloudIntegrationsRegistry: cloudIntegrationsRegistry,
|
||||
LogsParsingPipelineController: logParsingPipelineController,
|
||||
FluxInterval: config.Querier.FluxInterval,
|
||||
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
|
||||
@@ -135,7 +132,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
|
||||
Signoz: signoz,
|
||||
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
|
||||
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
|
||||
}, config)
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -167,7 +167,6 @@ func NewSQLMigrationProviderFactories(
|
||||
sqlmigration.NewMigrateRbacToAuthzFactory(sqlstore),
|
||||
sqlmigration.NewMigratePublicDashboardsFactory(sqlstore),
|
||||
sqlmigration.NewAddAnonymousPublicDashboardTransactionFactory(sqlstore),
|
||||
sqlmigration.NewAddRootUserFactory(sqlstore, sqlschema),
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
@@ -389,8 +389,6 @@ func New(
|
||||
// Initialize all modules
|
||||
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard)
|
||||
|
||||
userService := impluser.NewService(providerSettings, impluser.NewStore(sqlstore, providerSettings), modules.User, orgGetter, authz, config.User.Root)
|
||||
|
||||
// Initialize all handlers for the modules
|
||||
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway, telemetryMetadataStore, authz)
|
||||
|
||||
@@ -440,7 +438,6 @@ func New(
|
||||
factory.NewNamedService(factory.MustNewName("statsreporter"), statsReporter),
|
||||
factory.NewNamedService(factory.MustNewName("tokenizer"), tokenizer),
|
||||
factory.NewNamedService(factory.MustNewName("authz"), authz),
|
||||
factory.NewNamedService(factory.MustNewName("user"), userService),
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
||||
@@ -1,80 +0,0 @@
|
||||
package sqlmigration
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/sqlschema"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/uptrace/bun"
|
||||
"github.com/uptrace/bun/migrate"
|
||||
)
|
||||
|
||||
type addRootUser struct {
|
||||
sqlstore sqlstore.SQLStore
|
||||
sqlschema sqlschema.SQLSchema
|
||||
}
|
||||
|
||||
func NewAddRootUserFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
|
||||
return factory.NewProviderFactory(factory.MustNewName("add_root_user"), func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) {
|
||||
return &addRootUser{
|
||||
sqlstore: sqlstore,
|
||||
sqlschema: sqlschema,
|
||||
}, nil
|
||||
})
|
||||
}
|
||||
|
||||
func (migration *addRootUser) Register(migrations *migrate.Migrations) error {
|
||||
if err := migrations.Register(migration.Up, migration.Down); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (migration *addRootUser) Up(ctx context.Context, db *bun.DB) error {
|
||||
if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, false); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
tx, err := db.BeginTx(ctx, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
defer func() {
|
||||
_ = tx.Rollback()
|
||||
}()
|
||||
|
||||
table, uniqueConstraints, err := migration.sqlschema.GetTable(ctx, sqlschema.TableName("users"))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
column := &sqlschema.Column{
|
||||
Name: sqlschema.ColumnName("is_root"),
|
||||
DataType: sqlschema.DataTypeBoolean,
|
||||
Nullable: false,
|
||||
}
|
||||
|
||||
sqls := migration.sqlschema.Operator().AddColumn(table, uniqueConstraints, column, false)
|
||||
for _, sql := range sqls {
|
||||
if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, true); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (migration *addRootUser) Down(ctx context.Context, db *bun.DB) error {
|
||||
return nil
|
||||
}
|
||||
@@ -1,247 +0,0 @@
|
||||
package types
|
||||
|
||||
import (
|
||||
"database/sql/driver"
|
||||
"encoding/json"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
|
||||
type IntegrationUserEmail string
|
||||
|
||||
const (
|
||||
AWSIntegrationUserEmail IntegrationUserEmail = "aws-integration@signoz.io"
|
||||
)
|
||||
|
||||
var AllIntegrationUserEmails = []IntegrationUserEmail{
|
||||
AWSIntegrationUserEmail,
|
||||
}
|
||||
|
||||
// --------------------------------------------------------------------------
|
||||
// Normal integration uses just the installed_integration table
|
||||
// --------------------------------------------------------------------------
|
||||
|
||||
type InstalledIntegration struct {
|
||||
bun.BaseModel `bun:"table:installed_integration"`
|
||||
|
||||
Identifiable
|
||||
Type string `json:"type" bun:"type,type:text,unique:org_id_type"`
|
||||
Config InstalledIntegrationConfig `json:"config" bun:"config,type:text"`
|
||||
InstalledAt time.Time `json:"installed_at" bun:"installed_at,default:current_timestamp"`
|
||||
OrgID string `json:"org_id" bun:"org_id,type:text,unique:org_id_type,references:organizations(id),on_delete:cascade"`
|
||||
}
|
||||
|
||||
type InstalledIntegrationConfig map[string]interface{}
|
||||
|
||||
// For serializing from db
|
||||
func (c *InstalledIntegrationConfig) Scan(src interface{}) error {
|
||||
var data []byte
|
||||
switch v := src.(type) {
|
||||
case []byte:
|
||||
data = v
|
||||
case string:
|
||||
data = []byte(v)
|
||||
default:
|
||||
return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
|
||||
}
|
||||
|
||||
return json.Unmarshal(data, c)
|
||||
}
|
||||
|
||||
// For serializing to db
|
||||
func (c *InstalledIntegrationConfig) Value() (driver.Value, error) {
|
||||
filterSetJson, err := json.Marshal(c)
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "could not serialize integration config to JSON")
|
||||
}
|
||||
return filterSetJson, nil
|
||||
}
|
||||
|
||||
// --------------------------------------------------------------------------
|
||||
// Cloud integration uses the cloud_integration table
|
||||
// and cloud_integrations_service table
|
||||
// --------------------------------------------------------------------------
|
||||
|
||||
type CloudIntegration struct {
|
||||
bun.BaseModel `bun:"table:cloud_integration"`
|
||||
|
||||
Identifiable
|
||||
TimeAuditable
|
||||
Provider string `json:"provider" bun:"provider,type:text,unique:provider_id"`
|
||||
Config *AccountConfig `json:"config" bun:"config,type:text"`
|
||||
AccountID *string `json:"account_id" bun:"account_id,type:text"`
|
||||
LastAgentReport *AgentReport `json:"last_agent_report" bun:"last_agent_report,type:text"`
|
||||
RemovedAt *time.Time `json:"removed_at" bun:"removed_at,type:timestamp,nullzero"`
|
||||
OrgID string `bun:"org_id,type:text,unique:provider_id"`
|
||||
}
|
||||
|
||||
func (a *CloudIntegration) Status() AccountStatus {
|
||||
status := AccountStatus{}
|
||||
if a.LastAgentReport != nil {
|
||||
lastHeartbeat := a.LastAgentReport.TimestampMillis
|
||||
status.Integration.LastHeartbeatTsMillis = &lastHeartbeat
|
||||
}
|
||||
return status
|
||||
}
|
||||
|
||||
func (a *CloudIntegration) Account() Account {
|
||||
ca := Account{Id: a.ID.StringValue(), Status: a.Status()}
|
||||
|
||||
if a.AccountID != nil {
|
||||
ca.CloudAccountId = *a.AccountID
|
||||
}
|
||||
|
||||
if a.Config != nil {
|
||||
ca.Config = *a.Config
|
||||
} else {
|
||||
ca.Config = DefaultAccountConfig()
|
||||
}
|
||||
return ca
|
||||
}
|
||||
|
||||
type Account struct {
|
||||
Id string `json:"id"`
|
||||
CloudAccountId string `json:"cloud_account_id"`
|
||||
Config AccountConfig `json:"config"`
|
||||
Status AccountStatus `json:"status"`
|
||||
}
|
||||
|
||||
type AccountStatus struct {
|
||||
Integration AccountIntegrationStatus `json:"integration"`
|
||||
}
|
||||
|
||||
type AccountIntegrationStatus struct {
|
||||
LastHeartbeatTsMillis *int64 `json:"last_heartbeat_ts_ms"`
|
||||
}
|
||||
|
||||
func DefaultAccountConfig() AccountConfig {
|
||||
return AccountConfig{
|
||||
EnabledRegions: []string{},
|
||||
}
|
||||
}
|
||||
|
||||
type AccountConfig struct {
|
||||
EnabledRegions []string `json:"regions"`
|
||||
}
|
||||
|
||||
// For serializing from db
|
||||
func (c *AccountConfig) Scan(src any) error {
|
||||
var data []byte
|
||||
switch v := src.(type) {
|
||||
case []byte:
|
||||
data = v
|
||||
case string:
|
||||
data = []byte(v)
|
||||
default:
|
||||
return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
|
||||
}
|
||||
|
||||
return json.Unmarshal(data, c)
|
||||
}
|
||||
|
||||
// For serializing to db
|
||||
func (c *AccountConfig) Value() (driver.Value, error) {
|
||||
if c == nil {
|
||||
return nil, errors.NewInternalf(errors.CodeInternal, "cloud account config is nil")
|
||||
}
|
||||
|
||||
serialized, err := json.Marshal(c)
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't serialize cloud account config to JSON")
|
||||
}
|
||||
// Return as string instead of []byte to ensure PostgreSQL stores as text, not bytea
|
||||
return string(serialized), nil
|
||||
}
|
||||
|
||||
type AgentReport struct {
|
||||
TimestampMillis int64 `json:"timestamp_millis"`
|
||||
Data map[string]any `json:"data"`
|
||||
}
|
||||
|
||||
// For serializing from db
|
||||
func (r *AgentReport) Scan(src any) error {
|
||||
var data []byte
|
||||
switch v := src.(type) {
|
||||
case []byte:
|
||||
data = v
|
||||
case string:
|
||||
data = []byte(v)
|
||||
default:
|
||||
return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
|
||||
}
|
||||
|
||||
return json.Unmarshal(data, r)
|
||||
}
|
||||
|
||||
// For serializing to db
|
||||
func (r *AgentReport) Value() (driver.Value, error) {
|
||||
if r == nil {
|
||||
return nil, errors.NewInternalf(errors.CodeInternal, "agent report is nil")
|
||||
}
|
||||
|
||||
serialized, err := json.Marshal(r)
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(
|
||||
err, errors.CodeInternal, "couldn't serialize agent report to JSON",
|
||||
)
|
||||
}
|
||||
// Return as string instead of []byte to ensure PostgreSQL stores as text, not bytea
|
||||
return string(serialized), nil
|
||||
}
|
||||
|
||||
type CloudIntegrationService struct {
|
||||
bun.BaseModel `bun:"table:cloud_integration_service,alias:cis"`
|
||||
|
||||
Identifiable
|
||||
TimeAuditable
|
||||
Type string `bun:"type,type:text,notnull,unique:cloud_integration_id_type"`
|
||||
Config CloudServiceConfig `bun:"config,type:text"`
|
||||
CloudIntegrationID string `bun:"cloud_integration_id,type:text,notnull,unique:cloud_integration_id_type,references:cloud_integrations(id),on_delete:cascade"`
|
||||
}
|
||||
|
||||
type CloudServiceLogsConfig struct {
|
||||
Enabled bool `json:"enabled"`
|
||||
S3Buckets map[string][]string `json:"s3_buckets,omitempty"`
|
||||
}
|
||||
|
||||
type CloudServiceMetricsConfig struct {
|
||||
Enabled bool `json:"enabled"`
|
||||
}
|
||||
|
||||
type CloudServiceConfig struct {
|
||||
Logs *CloudServiceLogsConfig `json:"logs,omitempty"`
|
||||
Metrics *CloudServiceMetricsConfig `json:"metrics,omitempty"`
|
||||
}
|
||||
|
||||
// For serializing from db
|
||||
func (c *CloudServiceConfig) Scan(src any) error {
|
||||
var data []byte
|
||||
switch src := src.(type) {
|
||||
case []byte:
|
||||
data = src
|
||||
case string:
|
||||
data = []byte(src)
|
||||
default:
|
||||
return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
|
||||
}
|
||||
|
||||
return json.Unmarshal(data, c)
|
||||
}
|
||||
|
||||
// For serializing to db
|
||||
func (c *CloudServiceConfig) Value() (driver.Value, error) {
|
||||
if c == nil {
|
||||
return nil, errors.NewInternalf(errors.CodeInternal, "cloud service config is nil")
|
||||
}
|
||||
|
||||
serialized, err := json.Marshal(c)
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(
|
||||
err, errors.CodeInternal, "couldn't serialize cloud service config to JSON",
|
||||
)
|
||||
}
|
||||
// Return as string instead of []byte to ensure PostgreSQL stores as text, not bytea
|
||||
return string(serialized), nil
|
||||
}
|
||||
pkg/types/integrationstypes/cloudintegration.go (new file, 724 lines)
@@ -0,0 +1,724 @@
|
||||
package integrationstypes
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql/driver"
|
||||
"encoding/json"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
|
||||
// CloudProvider defines the interface to be implemented by different cloud providers.
|
||||
// This is a generic interface, so it accepts and returns generic types instead of concrete ones.
// It is the cloud provider's responsibility to cast them to the appropriate types and validate them.
|
||||
type CloudProvider interface {
|
||||
GetName() CloudProviderType
|
||||
|
||||
AgentCheckIn(ctx context.Context, req *PostableAgentCheckInPayload) (any, error)
|
||||
GenerateConnectionArtifact(ctx context.Context, req *PostableConnectionArtifact) (any, error)
|
||||
GetAccountStatus(ctx context.Context, orgID, accountID string) (*GettableAccountStatus, error)
|
||||
|
||||
ListServices(ctx context.Context, orgID string, accountID *string) (any, error) // returns either GettableAWSServices or GettableAzureServices
|
||||
GetServiceDetails(ctx context.Context, req *GetServiceDetailsReq) (any, error)
|
||||
ListConnectedAccounts(ctx context.Context, orgID string) (*GettableConnectedAccountsList, error)
|
||||
GetDashboard(ctx context.Context, req *GettableDashboard) (*dashboardtypes.Dashboard, error)
|
||||
GetAvailableDashboards(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error)
|
||||
|
||||
UpdateAccountConfig(ctx context.Context, req *PatchableAccountConfig) (any, error) // req can be either PatchableAWSAccountConfig or PatchableAzureAccountConfig
|
||||
UpdateServiceConfig(ctx context.Context, req *PatchableServiceConfig) (any, error)
|
||||
|
||||
DisconnectAccount(ctx context.Context, orgID, accountID string) (*CloudIntegration, error)
|
||||
}
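As a usage note, a minimal sketch of how callers can resolve a provider from the registry before delegating; resolveProvider is a hypothetical helper added only for illustration, while NewCloudProvider, CloudProviderType and the errors helpers already exist in this package:

// resolveProvider is illustrative only: validate the raw path variable and look
// the provider up in the registry, guarding against unregistered providers.
func resolveProvider(registry map[CloudProviderType]CloudProvider, raw string) (CloudProvider, error) {
	cloudProvider, err := NewCloudProvider(raw)
	if err != nil {
		return nil, err
	}
	provider, ok := registry[cloudProvider]
	if !ok {
		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported cloud provider: %s", raw)
	}
	return provider, nil
}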
|
||||
|
||||
type GettableDashboard struct {
|
||||
ID string
|
||||
OrgID valuer.UUID
|
||||
}
|
||||
|
||||
type GettableCloudIntegrationConnectionParams struct {
|
||||
IngestionUrl string `json:"ingestion_url,omitempty"`
|
||||
IngestionKey string `json:"ingestion_key,omitempty"`
|
||||
SigNozAPIUrl string `json:"signoz_api_url,omitempty"`
|
||||
SigNozAPIKey string `json:"signoz_api_key,omitempty"`
|
||||
}
|
||||
|
||||
type GettableIngestionKey struct {
|
||||
Name string `json:"name"`
|
||||
Value string `json:"value"`
|
||||
// Other attributes from the gateway response are not included here since they are not used.
|
||||
}
|
||||
|
||||
type GettableIngestionKeysSearch struct {
|
||||
Status string `json:"status"`
|
||||
Data []GettableIngestionKey `json:"data"`
|
||||
Error string `json:"error"`
|
||||
}
|
||||
|
||||
type GettableCreateIngestionKey struct {
|
||||
Status string `json:"status"`
|
||||
Data GettableIngestionKey `json:"data"`
|
||||
Error string `json:"error"`
|
||||
}
|
||||
|
||||
type GettableDeployment struct {
|
||||
Name string `json:"name"`
|
||||
ClusterInfo struct {
|
||||
Region struct {
|
||||
DNS string `json:"dns"`
|
||||
} `json:"region"`
|
||||
} `json:"cluster"`
|
||||
}
|
||||
|
||||
type GettableConnectedAccountsList struct {
|
||||
Accounts []*Account `json:"accounts"`
|
||||
}
|
||||
|
||||
// SigNozAWSAgentConfig represents requirements for agent deployment in user's AWS account
|
||||
type SigNozAWSAgentConfig struct {
|
||||
// The region in which SigNoz agent should be installed.
|
||||
Region string `json:"region"`
|
||||
|
||||
IngestionUrl string `json:"ingestion_url"`
|
||||
IngestionKey string `json:"ingestion_key"`
|
||||
SigNozAPIUrl string `json:"signoz_api_url"`
|
||||
SigNozAPIKey string `json:"signoz_api_key"`
|
||||
|
||||
Version string `json:"version,omitempty"`
|
||||
}
|
||||
|
||||
type PostableConnectionArtifact struct {
|
||||
OrgID string
|
||||
Data []byte // either PostableAWSConnectionUrl or PostableAzureConnectionCommand
|
||||
}
|
||||
|
||||
type PostableAWSConnectionUrl struct {
|
||||
// Optional. To be specified for updates.
|
||||
// TODO: evaluate and remove if not needed.
|
||||
AccountId *string `json:"account_id,omitempty"`
|
||||
AccountConfig *AWSAccountConfig `json:"account_config"`
|
||||
AgentConfig *SigNozAWSAgentConfig `json:"agent_config"`
|
||||
}
|
||||
|
||||
type PostableAzureConnectionCommand struct {
|
||||
AgentConfig *SigNozAzureAgentConfig `json:"agent_config"`
|
||||
AccountConfig *AzureAccountConfig `json:"account_config"`
|
||||
}
|
||||
|
||||
func (p *PostableAWSConnectionUrl) Unmarshal(src any) error {
|
||||
var data []byte
|
||||
switch src := src.(type) {
|
||||
case []byte:
|
||||
data = src
|
||||
case string:
|
||||
data = []byte(src)
|
||||
default:
|
||||
return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
|
||||
}
|
||||
|
||||
err := json.Unmarshal(data, p)
|
||||
if err != nil {
|
||||
return errors.WrapInternalf(
|
||||
err, errors.CodeInternal, "couldn't deserialize aws connection url request from JSON",
|
||||
)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *PostableAzureConnectionCommand) Unmarshal(src any) error {
|
||||
var data []byte
|
||||
switch src := src.(type) {
|
||||
case []byte:
|
||||
data = src
|
||||
case string:
|
||||
data = []byte(src)
|
||||
default:
|
||||
return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
|
||||
}
|
||||
|
||||
err := json.Unmarshal(data, p)
|
||||
if err != nil {
|
||||
return errors.WrapInternalf(
|
||||
err, errors.CodeInternal, "couldn't deserialize azure connection command request from JSON",
|
||||
)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
type SigNozAzureAgentConfig struct {
|
||||
IngestionUrl string `json:"ingestion_url"`
|
||||
IngestionKey string `json:"ingestion_key"`
|
||||
SigNozAPIUrl string `json:"signoz_api_url"`
|
||||
SigNozAPIKey string `json:"signoz_api_key"`
|
||||
|
||||
Version string `json:"version,omitempty"`
|
||||
}
|
||||
|
||||
type GettableAWSConnectionUrl struct {
|
||||
AccountId string `json:"account_id"`
|
||||
ConnectionUrl string `json:"connection_url"`
|
||||
}
|
||||
|
||||
type GettableAzureConnectionCommand struct {
|
||||
AccountId string `json:"account_id"`
|
||||
AzureShellConnectionCommand string `json:"az_shell_connection_command"`
|
||||
AzureCliConnectionCommand string `json:"az_cli_connection_command"`
|
||||
}
|
||||
|
||||
type GettableAccountStatus struct {
|
||||
Id string `json:"id"`
|
||||
CloudAccountId *string `json:"cloud_account_id,omitempty"`
|
||||
Status AccountStatus `json:"status"`
|
||||
}
|
||||
|
||||
type PostableAgentCheckInPayload struct {
|
||||
ID string `json:"account_id"`
|
||||
AccountID string `json:"cloud_account_id"`
|
||||
// Arbitrary cloud specific Agent data
|
||||
Data map[string]any `json:"data,omitempty"`
|
||||
OrgID string `json:"-"`
|
||||
}
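Note the field-to-JSON mapping above: ID is read from the agent's account_id, AccountID from cloud_account_id, and OrgID is never taken from the body (handlers set it from the authenticated claims). An illustrative decode, with made-up values, assuming it sits in this package:

// Illustrative only: decode a sample check-in body; the values are made up.
func decodeSampleCheckIn() (*PostableAgentCheckInPayload, error) {
	sample := []byte(`{"account_id":"11111111-1111-1111-1111-111111111111","cloud_account_id":"123456789012","data":{"agent_version":"v0.0.1"}}`)
	req := new(PostableAgentCheckInPayload)
	if err := json.Unmarshal(sample, req); err != nil {
		return nil, err
	}
	// req.OrgID is intentionally left empty here; handlers populate it from claims.
	return req, nil
}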
|
||||
|
||||
type GettableAWSAgentCheckIn struct {
|
||||
AccountId string `json:"account_id"`
|
||||
CloudAccountId string `json:"cloud_account_id"`
|
||||
RemovedAt *time.Time `json:"removed_at"`
|
||||
|
||||
IntegrationConfig AWSAgentIntegrationConfig `json:"integration_config"`
|
||||
}
|
||||
|
||||
type AWSAgentIntegrationConfig struct {
|
||||
EnabledRegions []string `json:"enabled_regions"`
|
||||
TelemetryCollectionStrategy *AWSCollectionStrategy `json:"telemetry,omitempty"`
|
||||
}
|
||||
|
||||
type AzureAgentIntegrationConfig struct {
|
||||
DeploymentRegion string `json:"deployment_region"` // will not be changed once set
|
||||
EnabledResourceGroups []string `json:"resource_groups"`
|
||||
// TelemetryCollectionStrategy is a map of service to telemetry config
|
||||
TelemetryCollectionStrategy map[string]*AzureCollectionStrategy `json:"telemetry,omitempty"`
|
||||
}
|
||||
|
||||
type GettableAzureAgentCheckIn struct {
|
||||
AccountId string `json:"account_id"`
|
||||
CloudAccountId string `json:"cloud_account_id"`
|
||||
RemovedAt *time.Time `json:"removed_at"`
|
||||
IntegrationConfig AzureAgentIntegrationConfig `json:"integration_config"`
|
||||
}
|
||||
|
||||
type PatchableServiceConfig struct {
|
||||
OrgID string `json:"org_id"`
|
||||
ServiceId string `json:"service_id"`
|
||||
Config []byte `json:"config"` // json serialized config
|
||||
}
|
||||
|
||||
type PatchableAWSCloudServiceConfig struct {
|
||||
CloudAccountId string `json:"cloud_account_id"`
|
||||
Config *AWSCloudServiceConfig `json:"config"`
|
||||
}
|
||||
|
||||
type AWSCloudServiceConfig struct {
|
||||
Logs *AWSCloudServiceLogsConfig `json:"logs,omitempty"`
|
||||
Metrics *AWSCloudServiceMetricsConfig `json:"metrics,omitempty"`
|
||||
}
|
||||
|
||||
// Unmarshal unmarshals data from src
|
||||
func (c *PatchableAWSCloudServiceConfig) Unmarshal(src []byte) error {
|
||||
err := json.Unmarshal(src, c)
|
||||
if err != nil {
|
||||
return errors.WrapInternalf(
|
||||
err, errors.CodeInternal, "couldn't deserialize aws service config req from JSON",
|
||||
)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Marshal serializes data to bytes
|
||||
func (c *PatchableAWSCloudServiceConfig) Marshal() ([]byte, error) {
|
||||
serialized, err := json.Marshal(c)
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(
|
||||
err, errors.CodeInternal, "couldn't serialize aws service config req to JSON",
|
||||
)
|
||||
}
|
||||
return serialized, nil
|
||||
}
|
||||
|
||||
// Unmarshal unmarshals data from src
|
||||
func (a *AWSCloudServiceConfig) Unmarshal(src []byte) error {
|
||||
err := json.Unmarshal(src, a)
|
||||
if err != nil {
|
||||
return errors.WrapInternalf(
|
||||
err, errors.CodeInternal, "couldn't deserialize cloud service config from JSON",
|
||||
)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Marshal serializes data to bytes
|
||||
func (a *AWSCloudServiceConfig) Marshal() ([]byte, error) {
|
||||
serialized, err := json.Marshal(a)
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(
|
||||
err, errors.CodeInternal, "couldn't serialize cloud service config to JSON",
|
||||
)
|
||||
}
|
||||
return serialized, nil
|
||||
}
|
||||
|
||||
func (a *AWSCloudServiceConfig) Validate(def *AWSServiceDefinition) error {
|
||||
if def.Id != S3Sync && a.Logs != nil && a.Logs.S3Buckets != nil {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "s3 buckets can only be added to service-type[%s]", S3Sync)
|
||||
} else if def.Id == S3Sync && a.Logs != nil && a.Logs.S3Buckets != nil {
|
||||
for region := range a.Logs.S3Buckets {
|
||||
if _, found := ValidAWSRegions[region]; !found {
|
||||
return errors.NewInvalidInputf(CodeInvalidCloudRegion, "invalid cloud region: %s", region)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
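A minimal sketch of how a provider's UpdateServiceConfig path might combine the Unmarshal and Validate helpers above; validateAWSServiceConfigPatch and the way the AWSServiceDefinition is obtained are assumptions for illustration:

// Hypothetical flow: decode the opaque patch bytes, then validate the embedded
// config against the service definition before persisting it.
func validateAWSServiceConfigPatch(def *AWSServiceDefinition, raw []byte) (*PatchableAWSCloudServiceConfig, error) {
	patch := new(PatchableAWSCloudServiceConfig)
	if err := patch.Unmarshal(raw); err != nil {
		return nil, err
	}
	if patch.Config == nil {
		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "config is required")
	}
	if err := patch.Config.Validate(def); err != nil {
		return nil, err
	}
	return patch, nil
}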
|
||||
|
||||
func (a *AzureCloudServiceConfig) Validate(def *AzureServiceDefinition) error {
|
||||
logsMap := make(map[string]bool)
|
||||
metricsMap := make(map[string]bool)
|
||||
|
||||
for _, log := range def.Strategy.AzureLogs {
|
||||
logsMap[log.Name] = true
|
||||
}
|
||||
|
||||
for _, metric := range def.Strategy.AzureMetrics {
|
||||
metricsMap[metric.Name] = true
|
||||
}
|
||||
|
||||
for _, log := range a.Logs {
|
||||
if _, found := logsMap[log.Name]; !found {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid log name: %s", log.Name)
|
||||
}
|
||||
}
|
||||
|
||||
for _, metric := range a.Metrics {
|
||||
if _, found := metricsMap[metric.Name]; !found {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid metric name: %s", metric.Name)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
type AzureCloudServiceLogsConfig struct {
|
||||
Enabled bool `json:"enabled"`
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
type AzureCloudServiceMetricsConfig struct {
|
||||
Enabled bool `json:"enabled"`
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
type PatchServiceConfigResponse struct {
|
||||
ServiceId string `json:"id"`
|
||||
Config any `json:"config"`
|
||||
}
|
||||
|
||||
type PatchableAccountConfig struct {
|
||||
OrgID string
|
||||
AccountId string
|
||||
Data []byte // can be either AWSAccountConfig or AzureAccountConfig
|
||||
}
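The Data bytes are provider-specific and are decoded by the provider implementation. A minimal sketch for the Azure case, assuming a hypothetical decodeAzureAccountPatch helper; the Patchable* types below are from this change:

// Illustrative only: an Azure provider decoding the opaque account-config patch.
func decodeAzureAccountPatch(req *PatchableAccountConfig) (*AzureAccountConfig, error) {
	patch := new(PatchableAzureAccountConfig)
	if err := patch.Unmarshal(req.Data); err != nil {
		return nil, err
	}
	if patch.Config == nil {
		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "account config is required")
	}
	return patch.Config, nil
}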
|
||||
|
||||
type PatchableAWSAccountConfig struct {
|
||||
Config *AWSAccountConfig `json:"config"`
|
||||
}
|
||||
|
||||
type PatchableAzureAccountConfig struct {
|
||||
Config *AzureAccountConfig `json:"config"`
|
||||
}
|
||||
|
||||
func (p *PatchableAWSAccountConfig) Unmarshal(src []byte) error {
|
||||
err := json.Unmarshal(src, p)
|
||||
if err != nil {
|
||||
return errors.WrapInternalf(
|
||||
err, errors.CodeInternal, "couldn't deserialize patchable account config from JSON",
|
||||
)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *PatchableAWSAccountConfig) Marshal() ([]byte, error) {
|
||||
if p == nil {
|
||||
return nil, errors.NewInternalf(errors.CodeInternal, "patchable account config is nil")
|
||||
}
|
||||
|
||||
serialized, err := json.Marshal(p)
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(
|
||||
err, errors.CodeInternal, "couldn't serialize patchable account config to JSON",
|
||||
)
|
||||
}
|
||||
return serialized, nil
|
||||
}
|
||||
|
||||
func (p *PatchableAzureAccountConfig) Unmarshal(src []byte) error {
|
||||
err := json.Unmarshal(src, p)
|
||||
if err != nil {
|
||||
return errors.WrapInternalf(
|
||||
err, errors.CodeInternal, "couldn't deserialize patchable account config from JSON",
|
||||
)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *PatchableAzureAccountConfig) Marshal() ([]byte, error) {
|
||||
if p == nil {
|
||||
return nil, errors.NewInternalf(errors.CodeInternal, "patchable account config is nil")
|
||||
}
|
||||
|
||||
serialized, err := json.Marshal(p)
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(
|
||||
err, errors.CodeInternal, "couldn't serialize patchable account config to JSON",
|
||||
)
|
||||
}
|
||||
return serialized, nil
|
||||
}
|
||||
|
||||
type AWSAccountConfig struct {
|
||||
EnabledRegions []string `json:"regions"`
|
||||
}
|
||||
|
||||
type AzureAccountConfig struct {
|
||||
DeploymentRegion string `json:"deployment_region,omitempty"`
|
||||
EnabledResourceGroups []string `json:"resource_groups,omitempty"`
|
||||
}
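Since PatchableAccountConfig.Data can carry either an AWS or an Azure payload, a caller has to pick the right patchable type per provider. The sketch below is hypothetical (the helper decodeAccountPatch is not part of the diff); it only uses the types and provider constants introduced in this changeset, and the sample payload is made up.

package main

import (
    "fmt"

    it "github.com/SigNoz/signoz/pkg/types/integrationstypes"
)

// decodeAccountPatch is a hypothetical helper that decodes the raw Data payload
// into the provider-specific patchable type.
func decodeAccountPatch(provider it.CloudProviderType, req it.PatchableAccountConfig) (any, error) {
    switch provider.String() {
    case it.CloudProviderAWS.String():
        p := new(it.PatchableAWSAccountConfig)
        if err := p.Unmarshal(req.Data); err != nil {
            return nil, err
        }
        return p, nil
    case it.CloudProviderAzure.String():
        p := new(it.PatchableAzureAccountConfig)
        if err := p.Unmarshal(req.Data); err != nil {
            return nil, err
        }
        return p, nil
    default:
        return nil, fmt.Errorf("unsupported provider: %s", provider.String())
    }
}

func main() {
    req := it.PatchableAccountConfig{
        OrgID:     "org-1",  // placeholder
        AccountId: "acct-1", // placeholder
        Data:      []byte(`{"config":{"regions":["us-east-1","eu-west-1"]}}`),
    }
    decoded, err := decodeAccountPatch(it.CloudProviderAWS, req)
    fmt.Println(decoded, err)
}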
// Unmarshal unmarshalls data from src
func (c *AWSAccountConfig) Unmarshal(src []byte) error {
    err := json.Unmarshal(src, c)
    if err != nil {
        return errors.WrapInternalf(
            err, errors.CodeInternal, "couldn't deserialize AWS account config from JSON",
        )
    }

    return nil
}

// Marshal serializes data to bytes
func (c *AWSAccountConfig) Marshal() ([]byte, error) {
    if c == nil {
        return nil, errors.NewInternalf(errors.CodeInternal, "cloud account config is nil")
    }

    serialized, err := json.Marshal(c)
    if err != nil {
        return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't serialize cloud account config to JSON")
    }
    return serialized, nil
}

// Unmarshal unmarshalls data from src
func (c *AzureAccountConfig) Unmarshal(src []byte) error {
    err := json.Unmarshal(src, c)
    if err != nil {
        return errors.WrapInternalf(
            err, errors.CodeInternal, "couldn't deserialize azure account config from JSON",
        )
    }

    return nil
}

// Marshal serializes data to bytes
func (c *AzureAccountConfig) Marshal() ([]byte, error) {
    if c == nil {
        return nil, errors.NewInternalf(errors.CodeInternal, "cloud account config is nil")
    }

    serialized, err := json.Marshal(c)
    if err != nil {
        return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't serialize cloud account config to JSON")
    }
    return serialized, nil
}
type GettableAWSServices struct {
    Services []AWSServiceSummary `json:"services"`
}

type GettableAzureServices struct {
    Services []AzureServiceSummary `json:"services"`
}

type PatchableAzureCloudServiceConfig struct {
    CloudAccountId string `json:"cloud_account_id"`
    Config *AzureCloudServiceConfig `json:"config"`
}

type AzureCloudServiceConfig struct {
    Logs []*AzureCloudServiceLogsConfig `json:"logs,omitempty"`
    Metrics []*AzureCloudServiceMetricsConfig `json:"metrics,omitempty"`
}

// Unmarshal unmarshalls data from src
func (c *PatchableAzureCloudServiceConfig) Unmarshal(src []byte) error {
    err := json.Unmarshal(src, c)
    if err != nil {
        return errors.WrapInternalf(
            err, errors.CodeInternal, "couldn't deserialize cloud service config from JSON",
        )
    }

    return nil
}

// Marshal serializes data to bytes
func (c *PatchableAzureCloudServiceConfig) Marshal() ([]byte, error) {
    if c == nil {
        return nil, errors.NewInternalf(errors.CodeInternal, "cloud service config is nil")
    }

    serialized, err := json.Marshal(c)
    if err != nil {
        return nil, errors.WrapInternalf(
            err, errors.CodeInternal, "couldn't serialize cloud service config to JSON",
        )
    }
    return serialized, nil
}

func (a *AzureCloudServiceConfig) Unmarshal(src []byte) error {
    err := json.Unmarshal(src, a)
    if err != nil {
        return errors.WrapInternalf(
            err, errors.CodeInternal, "couldn't deserialize cloud service config from JSON",
        )
    }

    return nil
}

func (a *AzureCloudServiceConfig) Marshal() ([]byte, error) {
    serialized, err := json.Marshal(a)
    if err != nil {
        return nil, errors.WrapInternalf(
            err, errors.CodeInternal, "couldn't serialize cloud service config to JSON",
        )
    }
    return serialized, nil
}
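To make the wire shape of a service-config patch concrete, the following sketch (not in the diff, values are placeholders) marshals a PatchableAzureCloudServiceConfig using the json tags defined above.

package main

import (
    "fmt"

    it "github.com/SigNoz/signoz/pkg/types/integrationstypes"
)

func main() {
    patch := &it.PatchableAzureCloudServiceConfig{
        CloudAccountId: "00000000-0000-0000-0000-000000000000", // placeholder subscription/account id
        Config: &it.AzureCloudServiceConfig{
            Logs:    []*it.AzureCloudServiceLogsConfig{{Enabled: true, Name: "AuditLogs"}},
            Metrics: []*it.AzureCloudServiceMetricsConfig{{Enabled: true, Name: "Availability"}},
        },
    }

    out, err := patch.Marshal()
    if err != nil {
        panic(err)
    }
    // expected shape:
    // {"cloud_account_id":"00000000-...","config":{"logs":[{"enabled":true,"name":"AuditLogs"}],
    //  "metrics":[{"enabled":true,"name":"Availability"}]}}
    fmt.Println(string(out))
}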
type GetServiceDetailsReq struct {
    OrgID string
    ServiceId string
    CloudAccountID *string
}

// --------------------------------------------------------------------------
// DATABASE TYPES
// --------------------------------------------------------------------------

// --------------------------------------------------------------------------
// Cloud integration uses the cloud_integration table
// and cloud_integrations_service table
// --------------------------------------------------------------------------

type CloudIntegration struct {
    bun.BaseModel `bun:"table:cloud_integration"`

    types.Identifiable
    types.TimeAuditable
    Provider string `json:"provider" bun:"provider,type:text,unique:provider_id"`
    Config []byte `json:"config" bun:"config,type:text"` // json serialized config
    AccountID *string `json:"account_id" bun:"account_id,type:text"`
    LastAgentReport *AgentReport `json:"last_agent_report" bun:"last_agent_report,type:text"`
    RemovedAt *time.Time `json:"removed_at" bun:"removed_at,type:timestamp,nullzero"`
    OrgID string `bun:"org_id,type:text,unique:provider_id"`
}

func (a *CloudIntegration) Status() AccountStatus {
    status := AccountStatus{}
    if a.LastAgentReport != nil {
        lastHeartbeat := a.LastAgentReport.TimestampMillis
        status.Integration.LastHeartbeatTsMillis = &lastHeartbeat
    }
    return status
}

func (a *CloudIntegration) Account(cloudProvider CloudProviderType) *Account {
    ca := &Account{Id: a.ID.StringValue(), Status: a.Status()}

    if a.AccountID != nil {
        ca.CloudAccountId = *a.AccountID
    }

    ca.Config = map[string]interface{}{}

    if len(a.Config) < 1 {
        return ca
    }

    switch cloudProvider {
    case CloudProviderAWS:
        config := new(AWSAccountConfig)
        _ = config.Unmarshal(a.Config)
        ca.Config = config
    case CloudProviderAzure:
        config := new(AzureAccountConfig)
        _ = config.Unmarshal(a.Config)
        ca.Config = config
    default:
    }

    return ca
}
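A small, hypothetical usage sketch (not from the diff) of the Account conversion above: a stored CloudIntegration row is turned into the API-facing Account, with the JSON config decoded per provider. The literal values and the partially zero-valued embedded fields are placeholders.

package main

import (
    "encoding/json"
    "fmt"

    it "github.com/SigNoz/signoz/pkg/types/integrationstypes"
)

func main() {
    awsAccountID := "123456789012" // placeholder AWS account id

    row := &it.CloudIntegration{
        Provider:  it.CloudProviderAWS.String(),
        AccountID: &awsAccountID,
        Config:    []byte(`{"regions":["us-east-1","eu-west-1"]}`),
    }

    acc := row.Account(it.CloudProviderAWS)

    out, _ := json.Marshal(acc)
    // The decoded AWSAccountConfig shows up under "config" with its "regions" field.
    fmt.Println(string(out))
}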
type Account struct {
    Id string `json:"id"`
    CloudAccountId string `json:"cloud_account_id"`
    Config any `json:"config"` // AWSAccountConfig or AzureAccountConfig
    Status AccountStatus `json:"status"`
}

type AccountStatus struct {
    Integration AccountIntegrationStatus `json:"integration"`
}

type AccountIntegrationStatus struct {
    LastHeartbeatTsMillis *int64 `json:"last_heartbeat_ts_ms"`
}

func DefaultAWSAccountConfig() AWSAccountConfig {
    return AWSAccountConfig{
        EnabledRegions: []string{},
    }
}

func DefaultAzureAccountConfig() AzureAccountConfig {
    return AzureAccountConfig{
        DeploymentRegion: "",
        EnabledResourceGroups: []string{},
    }
}
type AWSServiceSummary struct {
    DefinitionMetadata
    Config *AWSCloudServiceConfig `json:"config"`
}

type AzureServiceSummary struct {
    DefinitionMetadata
    Config *AzureCloudServiceConfig `json:"config"`
}

type GettableAWSServiceDetails struct {
    AWSServiceDefinition
    Config *AWSCloudServiceConfig `json:"config"`
    ConnectionStatus *ServiceConnectionStatus `json:"status,omitempty"`
}

type GettableAzureServiceDetails struct {
    AzureServiceDefinition
    Config *AzureCloudServiceConfig `json:"config"`
    ConnectionStatus *ServiceConnectionStatus `json:"status,omitempty"`
}

type ServiceConnectionStatus struct {
    Logs []*SignalConnectionStatus `json:"logs"`
    Metrics []*SignalConnectionStatus `json:"metrics"`
}

type SignalConnectionStatus struct {
    CategoryID string `json:"category"`
    CategoryDisplayName string `json:"category_display_name"`
    LastReceivedTsMillis int64 `json:"last_received_ts_ms"` // epoch milliseconds
    LastReceivedFrom string `json:"last_received_from"` // resource identifier
}

//// AddAWSServiceStrategy is a helper for accumulating strategies for enabled services.
//func AddAWSServiceStrategy(serviceType string, cs *AWSCollectionStrategy,
//	definitionStrat *AWSCollectionStrategy, config *AWSCloudServiceConfig) {
//	if config.Logs != nil && config.Logs.Enabled {
//		if serviceType == S3Sync {
//			// S3 bucket sync; No cloudwatch logs are appended for this service type;
//			// Though definition is populated with a custom cloudwatch group that helps in calculating logs connection status
//			cs.S3Buckets = config.Logs.S3Buckets
//		} else if definitionStrat.AWSLogs != nil { // services that includes a logs subscription
//			cs.AWSLogs.Subscriptions = append(
//				cs.AWSLogs.Subscriptions,
//				definitionStrat.AWSLogs.Subscriptions...,
//			)
//		}
//	}
//	if config.Metrics != nil && config.Metrics.Enabled && definitionStrat.AWSMetrics != nil {
//		cs.AWSMetrics.StreamFilters = append(
//			cs.AWSMetrics.StreamFilters,
//			definitionStrat.AWSMetrics.StreamFilters...,
//		)
//	}
//}
type AgentReport struct {
    TimestampMillis int64 `json:"timestamp_millis"`
    Data map[string]any `json:"data"`
}

// Scan scans data from db
func (r *AgentReport) Scan(src any) error {
    var data []byte
    switch v := src.(type) {
    case []byte:
        data = v
    case string:
        data = []byte(v)
    default:
        return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
    }

    return json.Unmarshal(data, r)
}

// Value serializes data to bytes for db insertion
func (r *AgentReport) Value() (driver.Value, error) {
    if r == nil {
        return nil, errors.NewInternalf(errors.CodeInternal, "agent report is nil")
    }

    serialized, err := json.Marshal(r)
    if err != nil {
        return nil, errors.WrapInternalf(
            err, errors.CodeInternal, "couldn't serialize agent report to JSON",
        )
    }
    return serialized, nil
}
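AgentReport implements the database/sql Valuer and Scanner pair, so the struct round-trips through the text column transparently. A minimal sketch of that round trip (not part of the diff, payload values are placeholders):

package main

import (
    "fmt"

    it "github.com/SigNoz/signoz/pkg/types/integrationstypes"
)

func main() {
    in := &it.AgentReport{
        TimestampMillis: 1700000000000,
        Data:            map[string]any{"agent_version": "0.0.1"}, // placeholder payload
    }

    // Value() is what the sql driver calls when writing the column.
    raw, err := in.Value()
    if err != nil {
        panic(err)
    }

    // Scan() restores the struct from the stored bytes.
    out := &it.AgentReport{}
    if err := out.Scan(raw); err != nil {
        panic(err)
    }
    fmt.Println(out.TimestampMillis, out.Data["agent_version"]) // expected: 1700000000000 0.0.1
}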
type CloudIntegrationService struct {
    bun.BaseModel `bun:"table:cloud_integration_service,alias:cis"`

    types.Identifiable
    types.TimeAuditable
    Type string `bun:"type,type:text,notnull,unique:cloud_integration_id_type"`
    Config []byte `bun:"config,type:text"` // json serialized config
    CloudIntegrationID string `bun:"cloud_integration_id,type:text,notnull,unique:cloud_integration_id_type,references:cloud_integrations(id),on_delete:cascade"`
}

type AWSCloudServiceLogsConfig struct {
    Enabled bool `json:"enabled"`
    S3Buckets map[string][]string `json:"s3_buckets,omitempty"`
}

type AWSCloudServiceMetricsConfig struct {
    Enabled bool `json:"enabled"`
}
pkg/types/integrationstypes/cloudservicedefinitions.go (new file, 247 lines)
@@ -0,0 +1,247 @@
package integrationstypes

import (
    "fmt"
    "time"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/types/dashboardtypes"
    "github.com/SigNoz/signoz/pkg/valuer"
)

const (
    S3Sync = "s3sync"
)

type DefinitionMetadata struct {
    Id string `json:"id"`
    Title string `json:"title"`
    Icon string `json:"icon"`
}

type Definition interface {
    GetId() string
    Validate() error
    PopulateDashboardURLs(svcId string)
}

var _ Definition = &AWSServiceDefinition{}
var _ Definition = &AzureServiceDefinition{}
type AWSServiceDefinition struct {
    DefinitionMetadata
    Overview string `json:"overview"` // markdown
    Assets Assets `json:"assets"`
    SupportedSignals SupportedSignals `json:"supported_signals"`
    DataCollected DataCollected `json:"data_collected"`
    Strategy *AWSCollectionStrategy `json:"telemetry_collection_strategy"`
    IngestionStatusCheck *IngestionStatusCheck `json:"ingestion_status_check"`
}

type IngestionStatusCheck struct {
    Metrics []*IngestionStatusCheckCategory `json:"metrics"`
    Logs []*IngestionStatusCheckCategory `json:"logs"`
}

type IngestionStatusCheckCategory struct {
    Category string `json:"category"`
    DisplayName string `json:"display_name"`
    Checks []*IngestionStatusCheckAttribute `json:"checks"`
}

type IngestionStatusCheckAttribute struct {
    Key string `json:"key"` // search key (metric name or log message)
    Attributes []*IngestionStatusCheckAttributeFilter `json:"attributes"`
}

type IngestionStatusCheckAttributeFilter struct {
    Name string `json:"name"`
    Operator string `json:"operator"`
    Value string `json:"value"`
}

func (def *AWSServiceDefinition) GetId() string {
    return def.Id
}

func (def *AWSServiceDefinition) Validate() error {
    seenDashboardIds := map[string]interface{}{}

    if def.Strategy == nil {
        return errors.NewInternalf(errors.CodeInternal, "telemetry_collection_strategy is required")
    }

    for _, dd := range def.Assets.Dashboards {
        if _, seen := seenDashboardIds[dd.Id]; seen {
            return errors.NewInternalf(errors.CodeInternal, "multiple dashboards found with id %s for AWS Integration", dd.Id)
        }
        seenDashboardIds[dd.Id] = nil
    }

    return nil
}

func (def *AWSServiceDefinition) PopulateDashboardURLs(serviceId string) {
    for i := range def.Assets.Dashboards {
        dashboardId := def.Assets.Dashboards[i].Id
        url := "/dashboard/" + GetCloudIntegrationDashboardID(CloudProviderAWS, serviceId, dashboardId)
        def.Assets.Dashboards[i].Url = url
    }
}
type AzureServiceDefinition struct {
    DefinitionMetadata

    Overview string `json:"overview"` // markdown

    Assets Assets `json:"assets"`

    SupportedSignals SupportedSignals `json:"supported_signals"`

    DataCollected DataCollected `json:"data_collected"`

    Strategy *AzureCollectionStrategy `json:"telemetry_collection_strategy"`
}

func (def *AzureServiceDefinition) PopulateDashboardURLs(svcId string) {
    for i := range def.Assets.Dashboards {
        dashboardId := def.Assets.Dashboards[i].Id
        url := "/dashboard/" + GetCloudIntegrationDashboardID(CloudProviderAzure, svcId, dashboardId)
        def.Assets.Dashboards[i].Url = url
    }
}

func (def *AzureServiceDefinition) GetId() string {
    return def.Id
}

func (def *AzureServiceDefinition) Validate() error {
    seenDashboardIds := map[string]interface{}{}

    if def.Strategy == nil {
        return errors.NewInternalf(errors.CodeInternal, "telemetry_collection_strategy is required")
    }

    for _, dd := range def.Assets.Dashboards {
        if _, seen := seenDashboardIds[dd.Id]; seen {
            return errors.NewInternalf(errors.CodeInternal, "multiple dashboards found with id %s for Azure Integration", dd.Id)
        }
        seenDashboardIds[dd.Id] = nil
    }

    return nil
}
type Assets struct {
    Dashboards []Dashboard `json:"dashboards"`
}

type SupportedSignals struct {
    Logs bool `json:"logs"`
    Metrics bool `json:"metrics"`
}

type DataCollected struct {
    Logs []CollectedLogAttribute `json:"logs"`
    Metrics []CollectedMetric `json:"metrics"`
}

type CollectedLogAttribute struct {
    Name string `json:"name"`
    Path string `json:"path"`
    Type string `json:"type"`
}

type CollectedMetric struct {
    Name string `json:"name"`
    Type string `json:"type"`
    Unit string `json:"unit"`
    Description string `json:"description"`
}

type AWSCollectionStrategy struct {
    Provider valuer.String `json:"provider"`

    AWSMetrics *AWSMetricsStrategy `json:"aws_metrics,omitempty"`
    AWSLogs *AWSLogsStrategy `json:"aws_logs,omitempty"`
    S3Buckets map[string][]string `json:"s3_buckets,omitempty"` // Only available in S3 Sync Service Type
}

type AzureCollectionStrategy struct {
    AzureMetrics []*AzureMetricsStrategy `json:"azure_metrics"`
    AzureLogs []*AzureLogsStrategy `json:"azure_logs"`
}

type AzureResourceGroup struct {
    Name string `json:"name"`
    Region string `json:"region"`
}

type AWSMetricsStrategy struct {
    // to be used as https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-metricstream.html#cfn-cloudwatch-metricstream-includefilters
    StreamFilters []struct {
        // json tags here are in the shape expected by AWS API as detailed at
        // https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-metricstream-metricstreamfilter.html
        Namespace string `json:"Namespace"`
        MetricNames []string `json:"MetricNames,omitempty"`
    } `json:"cloudwatch_metric_stream_filters"`
}

type AWSLogsStrategy struct {
    Subscriptions []struct {
        // subscribe to all logs groups with specified prefix.
        // eg: `/aws/rds/`
        LogGroupNamePrefix string `json:"log_group_name_prefix"`

        // https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/FilterAndPatternSyntax.html
        // "" implies no filtering is required.
        FilterPattern string `json:"filter_pattern"`
    } `json:"cloudwatch_logs_subscriptions"`
}

type AzureMetricsStrategy struct {
    CategoryType string `json:"category_type"`
    Name string `json:"name"`
}

type AzureLogsStrategy struct {
    CategoryType string `json:"category_type"`
    Name string `json:"name"`
}

type Dashboard struct {
    Id string `json:"id"`
    Url string `json:"url"`
    Title string `json:"title"`
    Description string `json:"description"`
    Image string `json:"image"`
    Definition *dashboardtypes.StorableDashboardData `json:"definition,omitempty"`
}

func GetCloudIntegrationDashboardID(cloudProvider valuer.String, svcId, dashboardId string) string {
    return fmt.Sprintf("cloud-integration--%s--%s--%s", cloudProvider, svcId, dashboardId)
}
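For orientation (not part of the diff), a short sketch of the dashboard id format produced by GetCloudIntegrationDashboardID and the URL that PopulateDashboardURLs derives from it; the service and dashboard names are placeholders.

package main

import (
    "fmt"

    it "github.com/SigNoz/signoz/pkg/types/integrationstypes"
)

func main() {
    // "rds" and "overview" are illustrative ids only.
    id := it.GetCloudIntegrationDashboardID(it.CloudProviderAWS, "rds", "overview")
    fmt.Println(id) // expected: cloud-integration--aws--rds--overview

    // PopulateDashboardURLs prefixes the same id with "/dashboard/".
    fmt.Println("/dashboard/" + id) // expected: /dashboard/cloud-integration--aws--rds--overview
}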
func GetDashboardsFromAssets(svcId string, cloudProvider CloudProviderType, createdAt *time.Time, assets Assets) []*dashboardtypes.Dashboard {
    dashboards := make([]*dashboardtypes.Dashboard, 0)

    for _, d := range assets.Dashboards {
        author := fmt.Sprintf("%s-integration", cloudProvider)
        dashboards = append(dashboards, &dashboardtypes.Dashboard{
            ID: GetCloudIntegrationDashboardID(cloudProvider, svcId, d.Id),
            Locked: true,
            Data: *d.Definition,
            TimeAuditable: types.TimeAuditable{
                CreatedAt: *createdAt,
                UpdatedAt: *createdAt,
            },
            UserAuditable: types.UserAuditable{
                CreatedBy: author,
                UpdatedBy: author,
            },
        })
    }

    return dashboards
}
@@ -1,4 +1,4 @@
package cloudintegrations
package integrationstypes

import (
    "github.com/SigNoz/signoz/pkg/errors"
@@ -41,3 +41,62 @@ var ValidAWSRegions = map[string]bool{
    "us-west-1": true, // US West (N. California).
    "us-west-2": true, // US West (Oregon).
}

var ValidAzureRegions = map[string]bool{
    "australiacentral": true,
    "australiacentral2": true,
    "australiaeast": true,
    "australiasoutheast": true,
    "austriaeast": true,
    "belgiumcentral": true,
    "brazilsouth": true,
    "brazilsoutheast": true,
    "canadacentral": true,
    "canadaeast": true,
    "centralindia": true,
    "centralus": true,
    "chilecentral": true,
    "denmarkeast": true,
    "eastasia": true,
    "eastus": true,
    "eastus2": true,
    "francecentral": true,
    "francesouth": true,
    "germanynorth": true,
    "germanywestcentral": true,
    "indonesiacentral": true,
    "israelcentral": true,
    "italynorth": true,
    "japaneast": true,
    "japanwest": true,
    "koreacentral": true,
    "koreasouth": true,
    "malaysiawest": true,
    "mexicocentral": true,
    "newzealandnorth": true,
    "northcentralus": true,
    "northeurope": true,
    "norwayeast": true,
    "norwaywest": true,
    "polandcentral": true,
    "qatarcentral": true,
    "southafricanorth": true,
    "southafricawest": true,
    "southcentralus": true,
    "southindia": true,
    "southeastasia": true,
    "spaincentral": true,
    "swedencentral": true,
    "switzerlandnorth": true,
    "switzerlandwest": true,
    "uaecentral": true,
    "uaenorth": true,
    "uksouth": true,
    "ukwest": true,
    "westcentralus": true,
    "westeurope": true,
    "westindia": true,
    "westus": true,
    "westus2": true,
    "westus3": true,
}
pkg/types/integrationstypes/integration.go (new file, 109 lines)
@@ -0,0 +1,109 @@
package integrationstypes

import (
    "database/sql/driver"
    "encoding/json"
    "strings"
    "time"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/valuer"
    "github.com/uptrace/bun"
)

// CloudProviderType type alias
type CloudProviderType = valuer.String

var (
    CloudProviderAWS = valuer.NewString("aws")
    CloudProviderAzure = valuer.NewString("azure")
)

var (
    CodeCloudProviderInvalidInput = errors.MustNewCode("invalid_cloud_provider")
)

func NewCloudProvider(provider string) (CloudProviderType, error) {
    switch provider {
    case CloudProviderAWS.String(), CloudProviderAzure.String():
        return valuer.NewString(provider), nil
    default:
        return CloudProviderType{}, errors.NewInvalidInputf(CodeCloudProviderInvalidInput, "invalid cloud provider: %s", provider)
    }
}

var (
    AWSIntegrationUserEmail = valuer.MustNewEmail("aws-integration@signoz.io")
    AzureIntegrationUserEmail = valuer.MustNewEmail("azure-integration@signoz.io")
)

var IntegrationUserEmails = []valuer.Email{
    AWSIntegrationUserEmail,
    AzureIntegrationUserEmail,
}

func IsCloudIntegrationDashboardUuid(dashboardUuid string) bool {
    parts := strings.SplitN(dashboardUuid, "--", 4)
    if len(parts) != 4 {
        return false
    }

    return parts[0] == "cloud-integration"
}

func GetCloudProviderFromDashboardID(dashboardUuid string) (CloudProviderType, error) {
    parts := strings.SplitN(dashboardUuid, "--", 4)
    if len(parts) != 4 {
        return valuer.String{}, errors.NewInvalidInputf(CodeCloudProviderInvalidInput, "invalid dashboard uuid: %s", dashboardUuid)
    }

    providerStr := parts[1]

    cloudProvider, err := NewCloudProvider(providerStr)
    if err != nil {
        return CloudProviderType{}, err
    }

    return cloudProvider, nil
}
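These parse helpers are the inverse of the id builder in cloudservicedefinitions.go. A hypothetical round-trip sketch (not in the diff, names are placeholders):

package main

import (
    "fmt"

    it "github.com/SigNoz/signoz/pkg/types/integrationstypes"
)

func main() {
    // Build an id with the helper from cloudservicedefinitions.go, then parse it back.
    id := it.GetCloudIntegrationDashboardID(it.CloudProviderAzure, "vm", "overview") // placeholder names

    fmt.Println(it.IsCloudIntegrationDashboardUuid(id)) // expected: true

    provider, err := it.GetCloudProviderFromDashboardID(id)
    fmt.Println(provider, err) // expected: azure <nil>

    // A plain dashboard uuid is not treated as a cloud-integration dashboard.
    fmt.Println(it.IsCloudIntegrationDashboardUuid("9f2c1c6e-0000-0000-0000-000000000000")) // expected: false
}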

// --------------------------------------------------------------------------
// Normal integration uses just the installed_integration table
// --------------------------------------------------------------------------

type InstalledIntegration struct {
    bun.BaseModel `bun:"table:installed_integration"`

    types.Identifiable
    Type string `json:"type" bun:"type,type:text,unique:org_id_type"`
    Config InstalledIntegrationConfig `json:"config" bun:"config,type:text"`
    InstalledAt time.Time `json:"installed_at" bun:"installed_at,default:current_timestamp"`
    OrgID string `json:"org_id" bun:"org_id,type:text,unique:org_id_type,references:organizations(id),on_delete:cascade"`
}

type InstalledIntegrationConfig map[string]interface{}

// Scan scans data from db
func (c *InstalledIntegrationConfig) Scan(src interface{}) error {
    var data []byte
    switch v := src.(type) {
    case []byte:
        data = v
    case string:
        data = []byte(v)
    default:
        return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
    }

    return json.Unmarshal(data, c)
}

// Value serializes data to db
func (c *InstalledIntegrationConfig) Value() (driver.Value, error) {
    filterSetJson, err := json.Marshal(c)
    if err != nil {
        return nil, errors.WrapInternalf(err, errors.CodeInternal, "could not serialize integration config to JSON")
    }
    return filterSetJson, nil
}
@@ -41,22 +41,6 @@ func NewOrganization(displayName string) *Organization {
    }
}

func NewOrganizationWithName(name string) *Organization {
    id := valuer.GenerateUUID()
    return &Organization{
        Identifiable: Identifiable{
            ID: id,
        },
        TimeAuditable: TimeAuditable{
            CreatedAt: time.Now(),
            UpdatedAt: time.Now(),
        },
        Name: name,
        DisplayName: name,
        Key: NewOrganizationKey(id),
    }
}

func NewOrganizationKey(orgID valuer.UUID) uint32 {
    hasher := fnv.New32a()

@@ -90,7 +74,6 @@ type TTLSetting struct {
type OrganizationStore interface {
    Create(context.Context, *Organization) error
    Get(context.Context, valuer.UUID) (*Organization, error)
    GetByName(context.Context, string) (*Organization, error)
    GetAll(context.Context) ([]*Organization, error)
    ListByKeyRange(context.Context, uint32, uint32) ([]*Organization, error)
    Update(context.Context, *Organization) error
@@ -11,16 +11,15 @@ import (
)

var (
    ErrCodeUserNotFound = errors.MustNewCode("user_not_found")
    ErrCodeAmbiguousUser = errors.MustNewCode("ambiguous_user")
    ErrUserAlreadyExists = errors.MustNewCode("user_already_exists")
    ErrPasswordAlreadyExists = errors.MustNewCode("password_already_exists")
    ErrResetPasswordTokenAlreadyExists = errors.MustNewCode("reset_password_token_already_exists")
    ErrPasswordNotFound = errors.MustNewCode("password_not_found")
    ErrResetPasswordTokenNotFound = errors.MustNewCode("reset_password_token_not_found")
    ErrAPIKeyAlreadyExists = errors.MustNewCode("api_key_already_exists")
    ErrAPIKeyNotFound = errors.MustNewCode("api_key_not_found")
    ErrCodeRootUserOperationUnsupported = errors.MustNewCode("root_user_operation_unsupported")
    ErrCodeUserNotFound = errors.MustNewCode("user_not_found")
    ErrCodeAmbiguousUser = errors.MustNewCode("ambiguous_user")
    ErrUserAlreadyExists = errors.MustNewCode("user_already_exists")
    ErrPasswordAlreadyExists = errors.MustNewCode("password_already_exists")
    ErrResetPasswordTokenAlreadyExists = errors.MustNewCode("reset_password_token_already_exists")
    ErrPasswordNotFound = errors.MustNewCode("password_not_found")
    ErrResetPasswordTokenNotFound = errors.MustNewCode("reset_password_token_not_found")
    ErrAPIKeyAlreadyExists = errors.MustNewCode("api_key_already_exists")
    ErrAPIKeyNotFound = errors.MustNewCode("api_key_not_found")
)

type GettableUser = User
@@ -30,10 +29,9 @@ type User struct {

    Identifiable
    DisplayName string `bun:"display_name" json:"displayName"`
    Email valuer.Email `bun:"email" json:"email"`
    Role Role `bun:"role" json:"role"`
    OrgID valuer.UUID `bun:"org_id" json:"orgId"`
    IsRoot bool `bun:"is_root" json:"isRoot"`
    Email valuer.Email `bun:"email,type:text" json:"email"`
    Role Role `bun:"role,type:text" json:"role"`
    OrgID valuer.UUID `bun:"org_id,type:text" json:"orgId"`
    TimeAuditable
}

@@ -66,7 +64,6 @@ func NewUser(displayName string, email valuer.Email, role Role, orgID valuer.UUI
    Email: email,
    Role: role,
    OrgID: orgID,
    IsRoot: false,
    TimeAuditable: TimeAuditable{
        CreatedAt: time.Now(),
        UpdatedAt: time.Now(),
@@ -74,65 +71,6 @@ func NewUser(displayName string, email valuer.Email, role Role, orgID valuer.UUI
    }, nil
}
func NewRootUser(displayName string, email valuer.Email, orgID valuer.UUID) (*User, error) {
    if email.IsZero() {
        return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "email is required")
    }

    if orgID.IsZero() {
        return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgID is required")
    }

    return &User{
        Identifiable: Identifiable{
            ID: valuer.GenerateUUID(),
        },
        DisplayName: displayName,
        Email: email,
        Role: RoleAdmin,
        OrgID: orgID,
        IsRoot: true,
        TimeAuditable: TimeAuditable{
            CreatedAt: time.Now(),
            UpdatedAt: time.Now(),
        },
    }, nil
}

// Update applies mutable fields from the input to the user. Immutable fields
// (email, is_root, org_id, id) are preserved. Only non-zero input fields are applied.
func (u *User) Update(displayName string, role Role) {
    if displayName != "" {
        u.DisplayName = displayName
    }
    if role != "" {
        u.Role = role
    }
    u.UpdatedAt = time.Now()
}

// PromoteToRoot promotes the user to a root user with admin role.
func (u *User) PromoteToRoot() {
    u.IsRoot = true
    u.Role = RoleAdmin
    u.UpdatedAt = time.Now()
}

// UpdateEmail updates the email of the user.
func (u *User) UpdateEmail(email valuer.Email) {
    u.Email = email
    u.UpdatedAt = time.Now()
}

// ErrIfRoot returns an error if the user is a root user. The caller should
// enrich the error with the specific operation using errors.WithAdditionalf.
func (u *User) ErrIfRoot() error {
    if u.IsRoot {
        return errors.New(errors.TypeUnsupported, ErrCodeRootUserOperationUnsupported, "this operation is not supported for the root user")
    }
    return nil
}

func NewTraitsFromUser(user *User) map[string]any {
    return map[string]any{
        "name": user.DisplayName,
@@ -195,7 +133,7 @@ type UserStore interface {
    // List users by email and org ids.
    ListUsersByEmailAndOrgIDs(ctx context.Context, email valuer.Email, orgIDs []valuer.UUID) ([]*User, error)

    UpdateUser(ctx context.Context, orgID valuer.UUID, user *User) error
    UpdateUser(ctx context.Context, orgID valuer.UUID, id string, user *User) (*User, error)
    DeleteUser(ctx context.Context, orgID string, id string) error

    // Creates a password.
@@ -218,9 +156,6 @@ type UserStore interface {

    CountByOrgID(ctx context.Context, orgID valuer.UUID) (int64, error)

    // Get root user by org.
    GetRootUserByOrgID(ctx context.Context, orgID valuer.UUID) (*User, error)

    // Transaction
    RunInTx(ctx context.Context, cb func(ctx context.Context) error) error
}
@@ -6,7 +6,7 @@ import pytest
import requests

from fixtures import types
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logger import setup_logger
from fixtures.logs import Logs
from fixtures.metrics import Metrics
@@ -20,7 +20,7 @@ logger = setup_logger(__name__)
def create_alert_rule(
    signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> Callable[[dict], str]:
    admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    rule_ids = []

@@ -11,56 +11,55 @@ from wiremock.resources.mappings import (
    WireMockMatchers,
)

from fixtures import types
from fixtures import dev, types
from fixtures.logger import setup_logger
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD

logger = setup_logger(__name__)

# USER_ADMIN_NAME = "admin"
# USER_ADMIN_EMAIL = "admin@integration.test"
# USER_ADMIN_PASSWORD = "password123Z$"
USER_ADMIN_NAME = "admin"
USER_ADMIN_EMAIL = "admin@integration.test"
USER_ADMIN_PASSWORD = "password123Z$"

USER_EDITOR_NAME = "editor"
USER_EDITOR_EMAIL = "editor@integration.test"
USER_EDITOR_PASSWORD = "password123Z$"


# @pytest.fixture(name="create_user_admin", scope="package")
# def create_user_admin(
#     signoz: types.SigNoz, request: pytest.FixtureRequest, pytestconfig: pytest.Config
# ) -> types.Operation:
#     def create() -> None:
#         response = requests.post(
#             signoz.self.host_configs["8080"].get("/api/v1/register"),
#             json={
#                 "name": USER_ADMIN_NAME,
#                 "orgName": "",
#                 "email": USER_ADMIN_EMAIL,
#                 "password": USER_ADMIN_PASSWORD,
#             },
#             timeout=5,
#         )
@pytest.fixture(name="create_user_admin", scope="package")
|
||||
def create_user_admin(
|
||||
signoz: types.SigNoz, request: pytest.FixtureRequest, pytestconfig: pytest.Config
|
||||
) -> types.Operation:
|
||||
def create() -> None:
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/register"),
|
||||
json={
|
||||
"name": USER_ADMIN_NAME,
|
||||
"orgName": "",
|
||||
"email": USER_ADMIN_EMAIL,
|
||||
"password": USER_ADMIN_PASSWORD,
|
||||
},
|
||||
timeout=5,
|
||||
)
|
||||
|
||||
# assert response.status_code == HTTPStatus.OK
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
|
||||
# return types.Operation(name="create_user_admin")
|
||||
return types.Operation(name="create_user_admin")
|
||||
|
||||
# def delete(_: types.Operation) -> None:
|
||||
# pass
|
||||
def delete(_: types.Operation) -> None:
|
||||
pass
|
||||
|
||||
# def restore(cache: dict) -> types.Operation:
|
||||
# return types.Operation(name=cache["name"])
|
||||
def restore(cache: dict) -> types.Operation:
|
||||
return types.Operation(name=cache["name"])
|
||||
|
||||
# return dev.wrap(
|
||||
# request,
|
||||
# pytestconfig,
|
||||
# "create_user_admin",
|
||||
# lambda: types.Operation(name=""),
|
||||
# create,
|
||||
# delete,
|
||||
# restore,
|
||||
# )
|
||||
return dev.wrap(
|
||||
request,
|
||||
pytestconfig,
|
||||
"create_user_admin",
|
||||
lambda: types.Operation(name=""),
|
||||
create,
|
||||
delete,
|
||||
restore,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(name="get_session_context", scope="function")
|
||||
@@ -190,7 +189,7 @@ def add_license(
|
||||
],
|
||||
)
|
||||
|
||||
access_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
|
||||
access_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
response = requests.post(
|
||||
url=signoz.self.host_configs["8080"].get("/api/v3/licenses"),
|
||||
|
||||
@@ -7,7 +7,7 @@ import pytest
import requests

from fixtures import types
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logger import setup_logger

logger = setup_logger(__name__)
@@ -73,7 +73,7 @@ def create_cloud_integration_account(
    if created_account_id and cloud_provider_used:
        get_token = request.getfixturevalue("get_token")
        try:
            admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
            admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
            r = _disconnect(admin_token, cloud_provider_used)
            if r.status_code != HTTPStatus.OK:
                logger.info(

@@ -9,7 +9,7 @@ from testcontainers.core.container import Network
from wiremock.testing.testcontainer import WireMockContainer

from fixtures import dev, types
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logger import setup_logger

logger = setup_logger(__name__)
@@ -74,9 +74,10 @@ def notification_channel(
@pytest.fixture(name="create_webhook_notification_channel", scope="function")
def create_webhook_notification_channel(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
) -> Callable[[str, str, dict, bool], str]:
    admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # function to create notification channel
    def _create_webhook_notification_channel(
@@ -15,9 +15,6 @@ from fixtures.logger import setup_logger

logger = setup_logger(__name__)

ROOT_USER_EMAIL = "root@integration.test"
ROOT_USER_PASSWORD = "Str0ngP@ssw0rd!"


@pytest.fixture(name="signoz", scope="package")
def signoz(  # pylint: disable=too-many-arguments,too-many-positional-arguments
@@ -76,9 +73,6 @@ def signoz(  # pylint: disable=too-many-arguments,too-many-positional-arguments
        "SIGNOZ_ALERTMANAGER_SIGNOZ_POLL__INTERVAL": "5s",
        "SIGNOZ_ALERTMANAGER_SIGNOZ_ROUTE_GROUP__WAIT": "1s",
        "SIGNOZ_ALERTMANAGER_SIGNOZ_ROUTE_GROUP__INTERVAL": "5s",
        "SIGNOZ_USER_ROOT_ENABLED": True,
        "SIGNOZ_USER_ROOT_EMAIL": ROOT_USER_EMAIL,
        "SIGNOZ_USER_ROOT_PASSWORD": ROOT_USER_PASSWORD,
    }
    | sqlstore.env
    | clickhouse.env

@@ -7,7 +7,7 @@ import requests
|
||||
from wiremock.client import HttpMethods, Mapping, MappingRequest, MappingResponse
|
||||
|
||||
from fixtures import types
|
||||
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
|
||||
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
|
||||
from fixtures.logger import setup_logger
|
||||
|
||||
logger = setup_logger(__name__)
|
||||
@@ -65,7 +65,7 @@ def test_webhook_notification_channel(
|
||||
time.sleep(10)
|
||||
|
||||
# Call test API for the notification channel
|
||||
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
response = requests.post(
|
||||
url=signoz.self.host_configs["8080"].get("/api/v1/testChannel"),
|
||||
json={
|
||||
|
||||
@@ -35,6 +35,7 @@ def test_telemetry_databases_exist(signoz: types.SigNoz) -> None:
|
||||
def test_teardown(
|
||||
signoz: types.SigNoz, # pylint: disable=unused-argument
|
||||
idp: types.TestContainerIDP, # pylint: disable=unused-argument
|
||||
create_user_admin: types.Operation, # pylint: disable=unused-argument
|
||||
migrator: types.Operation, # pylint: disable=unused-argument
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
@@ -3,15 +3,16 @@ from typing import Callable
|
||||
|
||||
import requests
|
||||
|
||||
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
|
||||
from fixtures.types import SigNoz
|
||||
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
|
||||
from fixtures.types import Operation, SigNoz
|
||||
|
||||
|
||||
def test_create_and_get_domain(
|
||||
signoz: SigNoz,
|
||||
create_user_admin: Operation, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
):
|
||||
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Get domains which should be an empty list
|
||||
response = requests.get(
|
||||
@@ -90,9 +91,10 @@ def test_create_and_get_domain(
|
||||
|
||||
def test_create_invalid(
|
||||
signoz: SigNoz,
|
||||
create_user_admin: Operation, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
):
|
||||
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Create a domain with type saml and body for oidc, this should fail because oidcConfig is not allowed for saml
|
||||
response = requests.post(
|
||||
@@ -171,10 +173,11 @@ def test_create_invalid(
|
||||
|
||||
def test_create_invalid_role_mapping(
|
||||
signoz: SigNoz,
|
||||
create_user_admin: Operation, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
):
|
||||
"""Test that invalid role mappings are rejected."""
|
||||
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Create domain with invalid defaultRole
|
||||
response = requests.post(
|
||||
|
||||
@@ -6,18 +6,22 @@ import requests
|
||||
from selenium import webdriver
|
||||
from wiremock.resources.mappings import Mapping
|
||||
|
||||
from fixtures.auth import add_license
|
||||
from fixtures.auth import (
|
||||
USER_ADMIN_EMAIL,
|
||||
USER_ADMIN_PASSWORD,
|
||||
add_license,
|
||||
)
|
||||
from fixtures.idputils import (
|
||||
get_saml_domain,
|
||||
get_user_by_email,
|
||||
perform_saml_login,
|
||||
)
|
||||
from fixtures.types import SigNoz, TestContainerDocker, TestContainerIDP
|
||||
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
|
||||
from fixtures.types import Operation, SigNoz, TestContainerDocker, TestContainerIDP
|
||||
|
||||
|
||||
def test_apply_license(
|
||||
signoz: SigNoz,
|
||||
create_user_admin: Operation, # pylint: disable=unused-argument
|
||||
make_http_mocks: Callable[[TestContainerDocker, List[Mapping]], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
) -> None:
|
||||
@@ -30,6 +34,7 @@ def test_create_auth_domain(
|
||||
create_saml_client: Callable[[str, str], None],
|
||||
update_saml_client_attributes: Callable[[str, Dict[str, Any]], None],
|
||||
get_saml_settings: Callable[[], dict],
|
||||
create_user_admin: Callable[[], None], # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
) -> None:
|
||||
# Create a saml client in the idp.
|
||||
@@ -39,7 +44,7 @@ def test_create_auth_domain(
|
||||
settings = get_saml_settings()
|
||||
|
||||
# Create a auth domain in signoz.
|
||||
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/domains"),
|
||||
@@ -122,7 +127,7 @@ def test_saml_authn(
|
||||
driver.get(url)
|
||||
idp_login("viewer@saml.integration.test", "password")
|
||||
|
||||
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Assert that the user was created in signoz.
|
||||
response = requests.get(
|
||||
@@ -173,7 +178,7 @@ def test_idp_initiated_saml_authn(
|
||||
driver.get(idp_initiated_login_url)
|
||||
idp_login("viewer.idp.initiated@saml.integration.test", "password")
|
||||
|
||||
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Assert that the user was created in signoz.
|
||||
response = requests.get(
|
||||
@@ -203,7 +208,7 @@ def test_saml_update_domain_with_group_mappings(
|
||||
get_token: Callable[[str, str], str],
|
||||
get_saml_settings: Callable[[], dict],
|
||||
) -> None:
|
||||
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
domain = get_saml_domain(signoz, admin_token)
|
||||
settings = get_saml_settings()
|
||||
|
||||
@@ -261,7 +266,7 @@ def test_saml_role_mapping_single_group_admin(
|
||||
signoz, driver, get_session_context, idp_login, email, "password"
|
||||
)
|
||||
|
||||
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
@@ -287,7 +292,7 @@ def test_saml_role_mapping_single_group_editor(
|
||||
signoz, driver, get_session_context, idp_login, email, "password"
|
||||
)
|
||||
|
||||
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
@@ -317,7 +322,7 @@ def test_saml_role_mapping_multiple_groups_highest_wins(
|
||||
signoz, driver, get_session_context, idp_login, email, "password"
|
||||
)
|
||||
|
||||
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
@@ -344,7 +349,7 @@ def test_saml_role_mapping_explicit_viewer_group(
|
||||
signoz, driver, get_session_context, idp_login, email, "password"
|
||||
)
|
||||
|
||||
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
@@ -370,7 +375,7 @@ def test_saml_role_mapping_unmapped_group_uses_default(
|
||||
signoz, driver, get_session_context, idp_login, email, "password"
|
||||
)
|
||||
|
||||
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
@@ -385,7 +390,7 @@ def test_saml_update_domain_with_use_role_claim(
|
||||
"""
|
||||
Updates SAML domain to enable useRoleAttribute (direct role attribute).
|
||||
"""
|
||||
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
domain = get_saml_domain(signoz, admin_token)
|
||||
settings = get_saml_settings()
|
||||
|
||||
@@ -447,7 +452,7 @@ def test_saml_role_mapping_role_claim_takes_precedence(
|
||||
signoz, driver, get_session_context, idp_login, email, "password"
|
||||
)
|
||||
|
||||
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
@@ -477,7 +482,7 @@ def test_saml_role_mapping_invalid_role_claim_fallback(
|
||||
signoz, driver, get_session_context, idp_login, email, "password"
|
||||
)
|
||||
|
||||
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
@@ -507,7 +512,7 @@ def test_saml_role_mapping_case_insensitive(
|
||||
signoz, driver, get_session_context, idp_login, email, "password"
|
||||
)
|
||||
|
||||
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
@@ -532,7 +537,7 @@ def test_saml_name_mapping(
|
||||
signoz, driver, get_session_context, idp_login, email, "password"
|
||||
)
|
||||
|
||||
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
@@ -560,7 +565,7 @@ def test_saml_empty_name_fallback(
|
||||
signoz, driver, get_session_context, idp_login, email, "password"
|
||||
)
|
||||
|
||||
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
|
||||
@@ -6,18 +6,22 @@ import requests
|
||||
from selenium import webdriver
|
||||
from wiremock.resources.mappings import Mapping
|
||||
|
||||
from fixtures.auth import add_license
|
||||
from fixtures.auth import (
|
||||
USER_ADMIN_EMAIL,
|
||||
USER_ADMIN_PASSWORD,
|
||||
add_license,
|
||||
)
|
||||
from fixtures.idputils import (
|
||||
get_oidc_domain,
|
||||
get_user_by_email,
|
||||
perform_oidc_login,
|
||||
)
|
||||
from fixtures.types import SigNoz, TestContainerDocker, TestContainerIDP
|
||||
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
|
||||
from fixtures.types import Operation, SigNoz, TestContainerDocker, TestContainerIDP
|
||||
|
||||
|
||||
def test_apply_license(
|
||||
signoz: SigNoz,
|
||||
create_user_admin: Operation, # pylint: disable=unused-argument
|
||||
make_http_mocks: Callable[[TestContainerDocker, List[Mapping]], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
) -> None:
|
||||
@@ -32,6 +36,7 @@ def test_create_auth_domain(
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
create_oidc_client: Callable[[str, str], None],
|
||||
get_oidc_settings: Callable[[], dict],
|
||||
create_user_admin: Callable[[], None], # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
) -> None:
|
||||
"""
|
||||
@@ -45,7 +50,7 @@ def test_create_auth_domain(
|
||||
settings = get_oidc_settings(client_id)
|
||||
|
||||
# Create a auth domain in signoz.
|
||||
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/domains"),
|
||||
@@ -104,7 +109,7 @@ def test_oidc_authn(
|
||||
driver.get(actual_url)
|
||||
idp_login("viewer@oidc.integration.test", "password123")
|
||||
|
||||
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Assert that the user was created in signoz.
|
||||
response = requests.get(
|
||||
@@ -138,7 +143,7 @@ def test_oidc_update_domain_with_group_mappings(
|
||||
"""
|
||||
Updates OIDC domain to add role mapping with group mappings and claim mapping.
|
||||
"""
|
||||
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
domain = get_oidc_domain(signoz, admin_token)
|
||||
client_id = f"oidc.integration.test.{signoz.self.host_configs['8080'].address}:{signoz.self.host_configs['8080'].port}"
|
||||
settings = get_oidc_settings(client_id)
|
@@ -199,7 +204,7 @@ def test_oidc_role_mapping_single_group_admin(
signoz, idp, driver, get_session_context, idp_login, email, "password123"
)

admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)

assert found_user is not None
@@ -225,7 +230,7 @@ def test_oidc_role_mapping_single_group_editor(
signoz, idp, driver, get_session_context, idp_login, email, "password123"
)

admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)

assert found_user is not None
@@ -255,7 +260,7 @@ def test_oidc_role_mapping_multiple_groups_highest_wins(
signoz, idp, driver, get_session_context, idp_login, email, "password123"
)

admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)

assert found_user is not None
@@ -282,7 +287,7 @@ def test_oidc_role_mapping_explicit_viewer_group(
signoz, idp, driver, get_session_context, idp_login, email, "password123"
)

admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)

assert found_user is not None
@@ -308,7 +313,7 @@ def test_oidc_role_mapping_unmapped_group_uses_default(
signoz, idp, driver, get_session_context, idp_login, email, "password123"
)

admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)

assert found_user is not None
@@ -324,7 +329,7 @@ def test_oidc_update_domain_with_use_role_claim(
"""
Updates OIDC domain to enable useRoleClaim.
"""
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
domain = get_oidc_domain(signoz, admin_token)
client_id = f"oidc.integration.test.{signoz.self.host_configs['8080'].address}:{signoz.self.host_configs['8080'].port}"
settings = get_oidc_settings(client_id)
@@ -388,7 +393,7 @@ def test_oidc_role_mapping_role_claim_takes_precedence(
signoz, idp, driver, get_session_context, idp_login, email, "password123"
)

admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)

assert found_user is not None
@@ -420,7 +425,7 @@ def test_oidc_role_mapping_invalid_role_claim_fallback(
signoz, idp, driver, get_session_context, idp_login, email, "password123"
)

admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)

assert found_user is not None
@@ -450,7 +455,7 @@ def test_oidc_role_mapping_case_insensitive(
signoz, idp, driver, get_session_context, idp_login, email, "password123"
)

admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)

assert found_user is not None
@@ -476,7 +481,7 @@ def test_oidc_name_mapping(
signoz, idp, driver, get_session_context, idp_login, email, "password123"
)

admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
headers={"Authorization": f"Bearer {admin_token}"},
@@ -512,7 +517,7 @@ def test_oidc_empty_name_uses_fallback(
signoz, idp, driver, get_session_context, idp_login, email, "password123"
)

admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
headers={"Authorization": f"Bearer {admin_token}"},

@@ -11,21 +11,21 @@ from wiremock.client (
)

from fixtures import types
from fixtures.auth import add_license
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD, add_license
from fixtures.logger import setup_logger
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD

logger = setup_logger(__name__)


def test_generate_connection_params(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
) -> None:
"""Test to generate connection parameters for AWS SigNoz cloud integration."""
# Get authentication token for admin user
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

add_license(signoz, make_http_mocks, get_token)


@@ -4,7 +4,7 @@ from typing import Callable
import requests

from fixtures import types
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logger import setup_logger

logger = setup_logger(__name__)
@@ -12,12 +12,13 @@ logger = setup_logger(__name__)

def test_generate_connection_url(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
"""Test to generate connection URL for AWS CloudFormation stack deployment."""

# Get authentication token for admin user
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
cloud_provider = "aws"
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/accounts/generate-connection-url"
@@ -100,11 +101,12 @@ def test_generate_connection_url(

def test_generate_connection_url_unsupported_provider(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
"""Test that unsupported cloud providers return an error."""
# Get authentication token for admin user
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Try with GCP (unsupported)
cloud_provider = "gcp"


@@ -5,7 +5,7 @@ from typing import Callable
import requests

from fixtures import types
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.cloudintegrationsutils import simulate_agent_checkin
from fixtures.logger import setup_logger

@@ -14,10 +14,11 @@ logger = setup_logger(__name__)

def test_list_connected_accounts_empty(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
"""Test listing connected accounts when there are none."""
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
cloud_provider = "aws"
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts"

@@ -42,11 +43,12 @@ def test_list_connected_accounts_empty(

def test_list_connected_accounts_with_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
create_cloud_integration_account: Callable,
) -> None:
"""Test listing connected accounts after creating one."""
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

# Create a test account
cloud_provider = "aws"
@@ -86,11 +88,12 @@ def test_list_connected_accounts_with_account(

def test_get_account_status(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
create_cloud_integration_account: Callable,
) -> None:
"""Test getting the status of a specific account."""
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Create a test account (no check-in needed for status check)
cloud_provider = "aws"
account_data = create_cloud_integration_account(admin_token, cloud_provider)
@@ -120,10 +123,11 @@ def test_get_account_status(

def test_get_account_status_not_found(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
"""Test getting status for a non-existent account."""
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
cloud_provider = "aws"
fake_account_id = "00000000-0000-0000-0000-000000000000"

@@ -143,11 +147,12 @@ def test_get_account_status_not_found(

def test_update_account_config(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
create_cloud_integration_account: Callable,
) -> None:
"""Test updating account configuration."""
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

# Create a test account
cloud_provider = "aws"
@@ -207,11 +212,12 @@ def test_update_account_config(

def test_disconnect_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
create_cloud_integration_account: Callable,
) -> None:
"""Test disconnecting an account."""
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

# Create a test account
cloud_provider = "aws"
@@ -261,10 +267,11 @@ def test_disconnect_account(

def test_disconnect_account_not_found(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
"""Test disconnecting a non-existent account."""
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

cloud_provider = "aws"
fake_account_id = "00000000-0000-0000-0000-000000000000"
@@ -284,11 +291,12 @@ def test_disconnect_account_not_found(

def test_list_accounts_unsupported_provider(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
"""Test listing accounts for an unsupported cloud provider."""

admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

cloud_provider = "gcp" # Unsupported provider
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts"

@@ -5,7 +5,7 @@ from typing import Callable
import requests

from fixtures import types
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.cloudintegrationsutils import simulate_agent_checkin
from fixtures.logger import setup_logger

@@ -14,10 +14,11 @@ logger = setup_logger(__name__)

def test_list_services_without_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
"""Test listing available services without specifying an account."""
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

cloud_provider = "aws"
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services"
@@ -47,11 +48,12 @@ def test_list_services_without_account(

def test_list_services_with_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
create_cloud_integration_account: Callable,
) -> None:
"""Test listing services for a specific connected account."""
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

# Create a test account and do check-in
cloud_provider = "aws"
@@ -91,10 +93,11 @@ def test_list_services_with_account(

def test_get_service_details_without_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
"""Test getting service details without specifying an account."""
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

cloud_provider = "aws"
# First get the list of services to get a valid service ID
@@ -136,11 +139,12 @@ def test_get_service_details_without_account(

def test_get_service_details_with_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
create_cloud_integration_account: Callable,
) -> None:
"""Test getting service details for a specific connected account."""
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

# Create a test account and do check-in
cloud_provider = "aws"
@@ -191,10 +195,11 @@ def test_get_service_details_with_account(

def test_get_service_details_invalid_service(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
"""Test getting details for a non-existent service."""
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

cloud_provider = "aws"
fake_service_id = "non-existent-service"
@@ -213,10 +218,11 @@ def test_get_service_details_invalid_service(

def test_list_services_unsupported_provider(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
"""Test listing services for an unsupported cloud provider."""
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

cloud_provider = "gcp" # Unsupported provider
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services"
@@ -234,11 +240,12 @@ def test_list_services_unsupported_provider(

def test_update_service_config(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
create_cloud_integration_account: Callable,
) -> None:
"""Test updating service configuration for a connected account."""
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

# Create a test account and do check-in
cloud_provider = "aws"
@@ -299,10 +306,11 @@ def test_update_service_config(

def test_update_service_config_without_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
"""Test updating service config without a connected account should fail."""
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

cloud_provider = "aws"

@@ -344,11 +352,12 @@ def test_update_service_config_without_account(

def test_update_service_config_invalid_service(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
create_cloud_integration_account: Callable,
) -> None:
"""Test updating config for a non-existent service should fail."""
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

# Create a test account and do check-in
cloud_provider = "aws"
@@ -387,11 +396,12 @@ def test_update_service_config_invalid_service(

def test_update_service_config_disable_service(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
create_cloud_integration_account: Callable,
) -> None:
"""Test disabling a service by updating config with enabled=false."""
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

# Create a test account and do check-in
cloud_provider = "aws"

@@ -4,16 +4,16 @@ from typing import Callable, List
import requests
from wiremock.resources.mappings import Mapping

from fixtures.auth import add_license
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
from fixtures.types import SigNoz, TestContainerDocker
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD, add_license
from fixtures.types import Operation, SigNoz, TestContainerDocker


def test_create_and_delete_dashboard_without_license(
signoz: SigNoz,
create_user_admin: Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
):
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/dashboards"),
@@ -38,6 +38,7 @@ def test_create_and_delete_dashboard_without_license(

def test_apply_license(
signoz: SigNoz,
create_user_admin: Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[TestContainerDocker, List[Mapping]], None],
get_token: Callable[[str, str], str],
) -> None:
@@ -49,9 +50,10 @@ def test_apply_license(

def test_create_and_delete_dashboard_with_license(
signoz: SigNoz,
create_user_admin: Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
):
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/dashboards"),

@@ -6,13 +6,13 @@ import requests
from sqlalchemy import sql
from wiremock.resources.mappings import Mapping

from fixtures.auth import add_license
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
from fixtures.types import SigNoz, TestContainerDocker
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD, add_license
from fixtures.types import Operation, SigNoz, TestContainerDocker


def test_apply_license(
signoz: SigNoz,
create_user_admin: Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[TestContainerDocker, List[Mapping]], None],
get_token: Callable[[str, str], str],
) -> None:
@@ -24,9 +24,10 @@ def test_apply_license(

def test_create_and_get_public_dashboard(
signoz: SigNoz,
create_user_admin: Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
):
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/dashboards"),
@@ -71,9 +72,10 @@ def test_create_and_get_public_dashboard(

def test_public_dashboard_widget_query_range(
signoz: SigNoz,
create_user_admin: Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
):
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

dashboard_req = {
"title": "Test Widget Query Range Dashboard",
@@ -194,12 +196,13 @@ def test_public_dashboard_widget_query_range(
def test_anonymous_role_has_public_dashboard_permission(
request: pytest.FixtureRequest,
signoz: SigNoz,
create_user_admin: Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
):
"""
Verify that the signoz-anonymous role has the public-dashboard/* permission.
"""
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

# Get the roles to find the org_id
response = requests.get(

@@ -11,11 +11,12 @@ from typing import Callable
import requests

from fixtures import types
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD


def test_create_logs_pipeline_success(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
"""
@@ -28,7 +29,7 @@ def test_create_logs_pipeline_success(
3. Verify the response contains version information
4. Verify the pipeline is returned in the response
"""
token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

pipeline_payload = {
"pipelines": [
@@ -104,6 +105,7 @@ def test_create_logs_pipeline_success(

def test_list_logs_pipelines_success(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
"""
@@ -115,7 +117,7 @@ def test_list_logs_pipelines_success(
2. List all pipelines and verify the created pipeline is present
3. Verify the response structure
"""
token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

# Create a pipeline first
create_payload = {
@@ -192,6 +194,7 @@ def test_list_logs_pipelines_success(

def test_list_logs_pipelines_by_version(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
"""
@@ -204,7 +207,7 @@ def test_list_logs_pipelines_by_version(
3. List pipelines by version 1 and verify it returns the original
4. List pipelines by version 2 and verify it returns the updated version
"""
token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

# Create version 1
v1_payload = {
@@ -353,6 +356,7 @@ def test_list_logs_pipelines_by_version(

def test_preview_logs_pipelines_success(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
"""
@@ -364,7 +368,7 @@ def test_preview_logs_pipelines_success(
2. Verify the preview processes logs correctly
3. Verify the response contains processed logs
"""
token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

preview_payload = {
"pipelines": [
@@ -444,6 +448,7 @@ def test_preview_logs_pipelines_success(

def test_create_multiple_pipelines_success(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
"""
@@ -455,7 +460,7 @@ def test_create_multiple_pipelines_success(
2. Verify all pipelines are created
3. Verify pipelines are ordered correctly
"""
token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

multi_pipeline_payload = {
"pipelines": [
@@ -557,6 +562,7 @@ def test_create_multiple_pipelines_success(

def test_delete_all_pipelines_success(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
"""
@@ -569,7 +575,7 @@ def test_delete_all_pipelines_success(
3. Verify pipelines are deleted
4. Verify new version is created
"""
token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

# Create a pipeline first
create_payload = {

@@ -5,102 +5,100 @@ import requests

from fixtures import types
from fixtures.logger import setup_logger
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
from fixtures.auth import USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD, USER_EDITOR_NAME

logger = setup_logger(__name__)


# def test_register_with_invalid_input(signoz: types.SigNoz) -> None:
# """
# Test the register endpoint with invalid input.
# 1. Invalid Password
# 2. Invalid Email
# """
# response = requests.post(
# signoz.self.host_configs["8080"].get("/api/v1/register"),
# json={
# "name": "admin",
# "orgId": "",
# "orgName": "integration.test",
# "email": "admin@integration.test",
# "password": "password", # invalid password
# },
# timeout=2,
# )
def test_register_with_invalid_input(signoz: types.SigNoz) -> None:
"""
Test the register endpoint with invalid input.
1. Invalid Password
2. Invalid Email
"""
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/register"),
json={
"name": "admin",
"orgId": "",
"orgName": "integration.test",
"email": "admin@integration.test",
"password": "password", # invalid password
},
timeout=2,
)

# assert response.status_code == HTTPStatus.BAD_REQUEST
assert response.status_code == HTTPStatus.BAD_REQUEST

# response = requests.post(
# signoz.self.host_configs["8080"].get("/api/v1/register"),
# json={
# "name": "admin",
# "orgId": "",
# "orgName": "integration.test",
# "email": "admin", # invalid email
# "password": "password123Z$",
# },
# timeout=2,
# )
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/register"),
json={
"name": "admin",
"orgId": "",
"orgName": "integration.test",
"email": "admin", # invalid email
"password": "password123Z$",
},
timeout=2,
)

# assert response.status_code == HTTPStatus.BAD_REQUEST
assert response.status_code == HTTPStatus.BAD_REQUEST


# def test_register(signoz: types.SigNoz, get_token: Callable[[str, str], str]) -> None:
# response = requests.get(
# signoz.self.host_configs["8080"].get("/api/v1/version"), timeout=2
# )
def test_register(signoz: types.SigNoz, get_token: Callable[[str, str], str]) -> None:
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/version"), timeout=2
)

# assert response.status_code == HTTPStatus.OK
# assert response.json()["setupCompleted"] is False
assert response.status_code == HTTPStatus.OK
assert response.json()["setupCompleted"] is False

# response = requests.post(
# signoz.self.host_configs["8080"].get("/api/v1/register"),
# json={
# "name": "admin",
# "orgId": "",
# "orgName": "integration.test",
# "email": "admin@integration.test",
# "password": "password123Z$",
# },
# timeout=2,
# )
# assert response.status_code == HTTPStatus.OK
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/register"),
json={
"name": "admin",
"orgId": "",
"orgName": "integration.test",
"email": "admin@integration.test",
"password": "password123Z$",
},
timeout=2,
)
assert response.status_code == HTTPStatus.OK

# response = requests.get(
# signoz.self.host_configs["8080"].get("/api/v1/version"), timeout=2
# )
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/version"), timeout=2
)

# assert response.status_code == HTTPStatus.OK
# assert response.json()["setupCompleted"] is True
assert response.status_code == HTTPStatus.OK
assert response.json()["setupCompleted"] is True

# admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token("admin@integration.test", "password123Z$")

# response = requests.get(
# signoz.self.host_configs["8080"].get("/api/v1/user"),
# timeout=2,
# headers={"Authorization": f"Bearer {admin_token}"},
# )
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)

# assert response.status_code == HTTPStatus.OK
assert response.status_code == HTTPStatus.OK

# user_response = response.json()["data"]
# found_user = next(
# (user for user in user_response if user["email"] == "admin@integration.test"),
# None,
# )
user_response = response.json()["data"]
found_user = next(
(user for user in user_response if user["email"] == "admin@integration.test"),
None,
)

# assert found_user is not None
# assert found_user["role"] == "ADMIN"
assert found_user is not None
assert found_user["role"] == "ADMIN"

# response = requests.get(
# signoz.self.host_configs["8080"].get(f"/api/v1/user/{found_user['id']}"),
# timeout=2,
# headers={"Authorization": f"Bearer {admin_token}"},
# )
response = requests.get(
signoz.self.host_configs["8080"].get(f"/api/v1/user/{found_user["id"]}"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)

# assert response.status_code == HTTPStatus.OK
# assert response.json()["data"]["role"] == "ADMIN"
assert response.status_code == HTTPStatus.OK
assert response.json()["data"]["role"] == "ADMIN"


def test_invite_and_register(
@@ -109,10 +107,10 @@ def test_invite_and_register(
# Generate an invite token for the editor user
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/invite"),
json={"email": USER_EDITOR_EMAIL, "role": "EDITOR", "name": USER_EDITOR_NAME},
json={"email": "editor@integration.test", "role": "EDITOR", "name": "editor"},
timeout=2,
headers={
"Authorization": f"Bearer {get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)}"
"Authorization": f"Bearer {get_token("admin@integration.test", "password123Z$")}"
},
)

@@ -122,7 +120,7 @@ def test_invite_and_register(
signoz.self.host_configs["8080"].get("/api/v1/invite"),
timeout=2,
headers={
"Authorization": f"Bearer {get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)}"
"Authorization": f"Bearer {get_token("admin@integration.test", "password123Z$")}"
},
)

@@ -131,7 +129,7 @@ def test_invite_and_register(
(
invite
for invite in invite_response
if invite["email"] == USER_EDITOR_EMAIL
if invite["email"] == "editor@integration.test"
),
None,
)
@@ -140,8 +138,8 @@ def test_invite_and_register(
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/invite/accept"),
json={
"password": USER_EDITOR_PASSWORD,
"displayName": USER_EDITOR_NAME,
"password": "password123Z$",
"displayName": "editor",
"token": f"{found_invite['token']}",
},
timeout=2,
@@ -161,7 +159,7 @@ def test_invite_and_register(
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={
"Authorization": f"Bearer {get_token(USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD)}"
"Authorization": f"Bearer {get_token("editor@integration.test", "password123Z$")}"
},
)

@@ -172,7 +170,7 @@ def test_invite_and_register(
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={
"Authorization": f"Bearer {get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)}"
"Authorization": f"Bearer {get_token("admin@integration.test", "password123Z$")}"
},
)

@@ -180,20 +178,20 @@ def test_invite_and_register(

user_response = response.json()["data"]
found_user = next(
(user for user in user_response if user["email"] == USER_EDITOR_EMAIL),
(user for user in user_response if user["email"] == "editor@integration.test"),
None,
)

assert found_user is not None
assert found_user["role"] == "EDITOR"
assert found_user["displayName"] == USER_EDITOR_NAME
assert found_user["email"] == USER_EDITOR_EMAIL
assert found_user["displayName"] == "editor"
assert found_user["email"] == "editor@integration.test"


def test_revoke_invite_and_register(
signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token("admin@integration.test", "password123Z$")
# Generate an invite token for the viewer user
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/invite"),
@@ -208,7 +206,7 @@ def test_revoke_invite_and_register(
signoz.self.host_configs["8080"].get("/api/v1/invite"),
timeout=2,
headers={
"Authorization": f"Bearer {get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)}"
"Authorization": f"Bearer {get_token("admin@integration.test", "password123Z$")}"
},
)

@@ -236,7 +234,7 @@ def test_revoke_invite_and_register(
json={
"password": "password123Z$",
"displayName": "viewer",
"token": f"{found_invite['token']}",
"token": f"{found_invite["token"]}",
},
timeout=2,
)
@@ -247,7 +245,7 @@ def test_self_access(
def test_self_access(
signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token("admin@integration.test", "password123Z$")

response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
@@ -259,7 +257,7 @@ def test_self_access(

user_response = response.json()["data"]
found_user = next(
(user for user in user_response if user["email"] == USER_EDITOR_EMAIL),
(user for user in user_response if user["email"] == "editor@integration.test"),
None,
)


@@ -11,7 +11,6 @@ from wiremock.client (
)

from fixtures import types
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD


def test_apply_license(
@@ -57,7 +56,7 @@ def test_apply_license(
],
)

access_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
access_token = get_token("admin@integration.test", "password123Z$")

response = requests.post(
url=signoz.self.host_configs["8080"].get("/api/v3/licenses"),
@@ -120,7 +119,7 @@ def test_refresh_license(
],
)

access_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
access_token = get_token("admin@integration.test", "password123Z$")

response = requests.put(
url=signoz.self.host_configs["8080"].get("/api/v3/licenses"),
@@ -177,7 +176,7 @@ def test_license_checkout(
],
)

access_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
access_token = get_token("admin@integration.test", "password123Z$")

response = requests.post(
url=signoz.self.host_configs["8080"].get("/api/v1/checkout"),
@@ -228,7 +227,7 @@ def test_license_portal(
],
)

access_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
access_token = get_token("admin@integration.test", "password123Z$")

response = requests.post(
url=signoz.self.host_configs["8080"].get("/api/v1/portal"),

@@ -4,11 +4,10 @@ from typing import Callable
import requests

from fixtures import types
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD


def test_api_key(signoz: types.SigNoz, get_token: Callable[[str, str], str]) -> None:
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token("admin@integration.test", "password123Z$")

response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/pats"),
@@ -29,7 +28,7 @@ def test_api_key(signoz: types.SigNoz, get_token: Callable[[str, str], str]) ->
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={"SIGNOZ-API-KEY": f"{pat_response['data']['token']}"},
headers={"SIGNOZ-API-KEY": f"{pat_response["data"]["token"]}"},
)

assert response.status_code == HTTPStatus.OK
@@ -39,14 +38,14 @@ def test_api_key(signoz: types.SigNoz, get_token: Callable[[str, str], str]) ->
(
user
for user in user_response["data"]
if user["email"] == ROOT_USER_EMAIL
if user["email"] == "admin@integration.test"
),
None,
)

response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/pats"),
headers={"SIGNOZ-API-KEY": f"{pat_response['data']['token']}"},
headers={"SIGNOZ-API-KEY": f"{pat_response["data"]["token"]}"},
timeout=2,
)


@@ -6,7 +6,6 @@ from sqlalchemy import sql

from fixtures import types
from fixtures.logger import setup_logger
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD

logger = setup_logger(__name__)

@@ -14,7 +13,7 @@ logger = setup_logger(__name__)
def test_change_password(
signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token("admin@integration.test", "password123Z$")

# Create another admin user
response = requests.post(
@@ -131,7 +130,7 @@ def test_change_password(
def test_reset_password(
signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token("admin@integration.test", "password123Z$")

# Get the user id for admin+password@integration.test
response = requests.get(
@@ -189,7 +188,7 @@ def test_reset_password(
def test_reset_password_with_no_password(
signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token("admin@integration.test", "password123Z$")

# Get the user id for admin+password@integration.test
response = requests.get(
@@ -254,7 +253,7 @@ def test_forgot_password_returns_204_for_nonexistent_email(
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v2/sessions/context"),
params={
"email": ROOT_USER_EMAIL,
"email": "admin@integration.test",
"ref": f"{signoz.self.host_configs['8080'].base()}",
},
timeout=5,
@@ -287,7 +286,7 @@ def test_forgot_password_creates_reset_token(
3. Use the token to reset password
4. Verify user can login with new password
"""
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token("admin@integration.test", "password123Z$")

# Create a user specifically for testing forgot password
response = requests.post(
@@ -419,7 +418,7 @@ def test_reset_password_with_expired_token(
"""
Test that resetting password with an expired token fails.
"""
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token("admin@integration.test", "password123Z$")

# Get user ID for the forgot@integration.test user
response = requests.get(

@@ -4,7 +4,6 @@ from typing import Callable, Tuple
import requests

from fixtures import types
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD


def test_change_role(
@@ -12,7 +11,7 @@ def test_change_role(
get_token: Callable[[str, str], str],
get_tokens: Callable[[str, str], Tuple[str, str]],
):
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token("admin@integration.test", "password123Z$")

# Create a new user as VIEWER
response = requests.post(

@@ -4,7 +4,7 @@ from typing import Callable
import requests

from fixtures import types
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logger import setup_logger

logger = setup_logger(__name__)
@@ -12,9 +12,10 @@ logger = setup_logger(__name__)

def test_get_user_preference(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
):
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user/preferences"),
@@ -28,9 +29,10 @@ def test_get_user_preference(

def test_get_set_user_preference_by_name(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
):
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

# preference does not exist
response = requests.get(

@@ -4,7 +4,7 @@ from typing import Callable
import requests

from fixtures import types
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logger import setup_logger

logger = setup_logger(__name__)
@@ -12,9 +12,10 @@ logger = setup_logger(__name__)

def test_get_org_preference(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
):
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/org/preferences"),
@@ -28,9 +29,10 @@ def test_get_org_preference(

def test_get_set_org_preference_by_name(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
):
admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

# preference does not exist
response = requests.get(

@@ -6,7 +6,7 @@ import pytest
import requests

from fixtures import types
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logs import Logs
from fixtures.querier import (
assert_minutely_bucket_values,
@@ -19,6 +19,7 @@ from src.querier.util import assert_identical_query_response

def test_logs_list(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
@@ -83,7 +84,7 @@ def test_logs_list(
]
)

token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

# Query Logs for the last 10 seconds and check if the logs are returned in the correct order
response = requests.post(
@@ -400,6 +401,7 @@ def test_logs_list(

def test_logs_list_with_corrupt_data(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
@@ -468,7 +470,7 @@ def test_logs_list_with_corrupt_data(
]
)

token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

# Query Logs for the last 10 seconds and check if the logs are returned in the correct order
response = requests.post(
@@ -581,6 +583,7 @@ def test_logs_list_with_corrupt_data(
)
def test_logs_list_with_order_by(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
order_by_context: str,
@@ -610,7 +613,7 @@ def test_logs_list_with_order_by(
]
)

token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

query = {
"type": "builder_query",
@@ -682,6 +685,7 @@ def test_logs_list_with_order_by(

def test_logs_time_series_count(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
@@ -784,7 +788,7 @@ def test_logs_time_series_count(
)
insert_logs(logs)

token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

# count() of all logs for the last 5 minutes
response = requests.post(
@@ -1222,6 +1226,7 @@ def test_logs_time_series_count(

def test_datatype_collision(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
@@ -1327,7 +1332,7 @@ def test_datatype_collision(

insert_logs(logs)

token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

# count() of all logs for the where severity_number > '7'
response = requests.post(
@@ -1656,6 +1661,7 @@ def test_datatype_collision(

def test_logs_fill_gaps(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
@@ -1681,7 +1687,7 @@ def test_logs_fill_gaps(
]
insert_logs(logs)

token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
@@ -1741,6 +1747,7 @@ def test_logs_fill_gaps(

def test_logs_fill_gaps_with_group_by(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
@@ -1766,7 +1773,7 @@ def test_logs_fill_gaps_with_group_by(
]
insert_logs(logs)

token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
@@ -1841,6 +1848,7 @@ def test_logs_fill_gaps_with_group_by(

def test_logs_fill_gaps_formula(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
@@ -1866,7 +1874,7 @@ def test_logs_fill_gaps_formula(
]
insert_logs(logs)

token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
@@ -1947,6 +1955,7 @@ def test_logs_fill_gaps_formula(

def test_logs_fill_gaps_formula_with_group_by(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
@@ -1972,7 +1981,7 @@ def test_logs_fill_gaps_formula_with_group_by(
]
insert_logs(logs)

token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
@@ -2074,6 +2083,7 @@ def test_logs_fill_gaps_formula_with_group_by(

def test_logs_fill_zero(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
@@ -2092,7 +2102,7 @@ def test_logs_fill_zero(
]
insert_logs(logs)

token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
@@ -2149,6 +2159,7 @@ def test_logs_fill_zero(

def test_logs_fill_zero_with_group_by(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
@@ -2174,7 +2185,7 @@ def test_logs_fill_zero_with_group_by(
]
insert_logs(logs)

token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
@@ -2248,6 +2259,7 @@ def test_logs_fill_zero_with_group_by(

def test_logs_fill_zero_formula(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
@@ -2273,7 +2285,7 @@ def test_logs_fill_zero_formula(
]
insert_logs(logs)

token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
@@ -2355,6 +2367,7 @@ def test_logs_fill_zero_formula(

def test_logs_fill_zero_formula_with_group_by(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
@@ -2380,7 +2393,7 @@ def test_logs_fill_zero_formula_with_group_by(
]
insert_logs(logs)

token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

@@ -6,12 +6,13 @@ from typing import Callable, List
import requests

from fixtures import types
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logs import Logs


def test_logs_json_body_simple_searches(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
@@ -85,7 +86,7 @@ def test_logs_json_body_simple_searches(
]
)

token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
token = get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)

# Test 1: Search by body.message = "User logged in successfully"
response = requests.post(
@@ -300,6 +301,7 @@ def test_logs_json_body_simple_searches(

def test_logs_json_body_nested_keys(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
@@ -392,7 +394,7 @@ def test_logs_json_body_nested_keys(
]
)

token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
token = get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)

# Test 1: Search by body.user.name = "john_doe"
response = requests.post(
@@ -569,6 +571,7 @@ def test_logs_json_body_nested_keys(

def test_logs_json_body_array_membership(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
@@ -645,7 +648,7 @@ def test_logs_json_body_array_membership(
]
)

token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
token = get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)

# Test 1: Search by has(body.tags[*], "production")
response = requests.post(
@@ -776,6 +779,7 @@ def test_logs_json_body_array_membership(

def test_logs_json_body_listing(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
@@ -873,7 +877,7 @@ def test_logs_json_body_listing(
]
)

token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
token = get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)

# Test 1: List all logs with JSON bodies
response = requests.post(

@@ -5,7 +5,7 @@ from typing import Callable, Dict, List
import requests

from fixtures import types
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.metrics import Metrics
from fixtures.querier import (
    assert_minutely_bucket_values,
@@ -16,6 +16,7 @@ from fixtures.querier import (

def test_metrics_fill_gaps(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
) -> None:
@@ -45,7 +46,7 @@ def test_metrics_fill_gaps(
    ]
    insert_metrics(metrics)

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
@@ -109,6 +110,7 @@ def test_metrics_fill_gaps(

def test_metrics_fill_gaps_with_group_by(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
) -> None:
@@ -137,7 +139,7 @@ def test_metrics_fill_gaps_with_group_by(
    ]
    insert_metrics(metrics)

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
@@ -217,6 +219,7 @@ def test_metrics_fill_gaps_with_group_by(

def test_metrics_fill_gaps_formula(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
) -> None:
@@ -246,7 +249,7 @@ def test_metrics_fill_gaps_formula(
    ]
    insert_metrics(metrics)

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
@@ -339,6 +342,7 @@ def test_metrics_fill_gaps_formula(

def test_metrics_fill_gaps_formula_with_group_by(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
) -> None:
@@ -381,7 +385,7 @@ def test_metrics_fill_gaps_formula_with_group_by(
    ]
    insert_metrics(metrics)

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
@@ -494,6 +498,7 @@ def test_metrics_fill_gaps_formula_with_group_by(

def test_metrics_fill_zero(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
) -> None:
@@ -515,7 +520,7 @@ def test_metrics_fill_zero(
    ]
    insert_metrics(metrics)

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
@@ -578,6 +583,7 @@ def test_metrics_fill_zero(

def test_metrics_fill_zero_with_group_by(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
) -> None:
@@ -605,7 +611,7 @@ def test_metrics_fill_zero_with_group_by(
    ]
    insert_metrics(metrics)

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
@@ -685,6 +691,7 @@ def test_metrics_fill_zero_with_group_by(

def test_metrics_fill_zero_formula(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
) -> None:
@@ -713,7 +720,7 @@ def test_metrics_fill_zero_formula(
    ]
    insert_metrics(metrics)

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
@@ -806,6 +813,7 @@ def test_metrics_fill_zero_formula(

def test_metrics_fill_zero_formula_with_group_by(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
) -> None:
@@ -848,7 +856,7 @@ def test_metrics_fill_zero_formula_with_group_by(
    ]
    insert_metrics(metrics)

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
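All of the fill-gaps hunks above share the same five-minute query window expressed in epoch milliseconds. A minimal standalone sketch of that arithmetic (standard library only; the minute truncation mirrors how `now` is built in the scalar tests later in this diff):

from datetime import datetime, timedelta, timezone

# Truncate to the start of the current minute so bucket boundaries line up,
# then derive a 5-minute [start_ms, end_ms) window in epoch milliseconds.
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

assert end_ms - start_ms == 5 * 60 * 1000  # exactly five one-minute buckets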
@@ -6,7 +6,7 @@ import pytest
import requests

from fixtures import types
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.querier import (
    assert_minutely_bucket_values,
    find_named_result,
@@ -23,6 +23,7 @@ from src.querier.util import (

def test_traces_list(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
@@ -149,7 +150,7 @@ def test_traces_list(
        ]
    )

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # Query all traces for the past 5 minutes
    response = requests.post(
@@ -661,6 +662,7 @@ def test_traces_list(
)
def test_traces_list_with_corrupt_data(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
    payload: Dict[str, Any],
@@ -678,7 +680,7 @@ def test_traces_list_with_corrupt_data(
    # 4 Traces with corrupt metadata inserted
    # traces[i] occured before traces[j] where i < j

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    response = make_query_request(
        signoz,
@@ -764,6 +766,7 @@ def test_traces_list_with_corrupt_data(
)
def test_traces_aggergate_order_by_count(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
    order_by: Dict[str, str],
@@ -890,7 +893,7 @@ def test_traces_aggergate_order_by_count(
        ]
    )

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    query = {
        "type": "builder_query",
@@ -934,6 +937,7 @@ def test_traces_aggergate_order_by_count(

def test_traces_aggregate_with_mixed_field_selectors(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
@@ -1057,7 +1061,7 @@ def test_traces_aggregate_with_mixed_field_selectors(
        ]
    )

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    query = {
        "type": "builder_query",
@@ -1110,6 +1114,7 @@ def test_traces_aggregate_with_mixed_field_selectors(

def test_traces_fill_gaps(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
@@ -1136,7 +1141,7 @@ def test_traces_fill_gaps(
    ]
    insert_traces(traces)

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
@@ -1193,6 +1198,7 @@ def test_traces_fill_gaps(

def test_traces_fill_gaps_with_group_by(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
@@ -1231,7 +1237,7 @@ def test_traces_fill_gaps_with_group_by(
    ]
    insert_traces(traces)

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
@@ -1305,6 +1311,7 @@ def test_traces_fill_gaps_with_group_by(

def test_traces_fill_gaps_formula(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
@@ -1343,7 +1350,7 @@ def test_traces_fill_gaps_formula(
    ]
    insert_traces(traces)

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
@@ -1424,6 +1431,7 @@ def test_traces_fill_gaps_formula(

def test_traces_fill_gaps_formula_with_group_by(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
@@ -1462,7 +1470,7 @@ def test_traces_fill_gaps_formula_with_group_by(
    ]
    insert_traces(traces)

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
@@ -1564,6 +1572,7 @@ def test_traces_fill_gaps_formula_with_group_by(

def test_traces_fill_zero(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
@@ -1589,7 +1598,7 @@ def test_traces_fill_zero(
    ]
    insert_traces(traces)

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
@@ -1646,6 +1655,7 @@ def test_traces_fill_zero(

def test_traces_fill_zero_with_group_by(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
@@ -1684,7 +1694,7 @@ def test_traces_fill_zero_with_group_by(
    ]
    insert_traces(traces)

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
@@ -1759,6 +1769,7 @@ def test_traces_fill_zero_with_group_by(

def test_traces_fill_zero_formula(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
@@ -1797,7 +1808,7 @@ def test_traces_fill_zero_formula(
    ]
    insert_traces(traces)

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
@@ -1878,6 +1889,7 @@ def test_traces_fill_zero_formula(

def test_traces_fill_zero_formula_with_group_by(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
@@ -1916,7 +1928,7 @@ def test_traces_fill_zero_formula_with_group_by(
    ]
    insert_traces(traces)

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
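The `fill_gaps` / `fill_zero` names above refer to padding time buckets that have no data points. The assertions themselves run against the query service; the snippet below is only a conceptual illustration of what a zero-filled minutely series looks like, not SigNoz code:

from typing import Dict, List, Tuple

STEP_MS = 60_000  # one-minute buckets


def fill_zero(
    points: Dict[int, float], start_ms: int, end_ms: int
) -> List[Tuple[int, float]]:
    """Return a dense series over [start_ms, end_ms), inserting 0.0
    for every minutely bucket that has no observed value."""
    series = []
    ts = start_ms
    while ts < end_ms:
        series.append((ts, points.get(ts, 0.0)))
        ts += STEP_MS
    return series


# Example: two observed buckets inside a 5-minute window.
observed = {0: 4.0, 120_000: 7.0}
print(fill_zero(observed, 0, 300_000))
# [(0, 4.0), (60000, 0.0), (120000, 7.0), (180000, 0.0), (240000, 0.0)]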
@@ -8,7 +8,7 @@ from http import HTTPStatus
from typing import Any, Callable, List

from fixtures import types
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.metrics import Metrics
from fixtures.querier import (
    build_builder_query,
@@ -23,6 +23,7 @@ CUMULATIVE_COUNTERS_FILE = os.path.join(TESTDATA_DIR, "cumulative_counters_1h.js

def test_rate_with_steady_values_and_reset(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
) -> None:
@@ -38,7 +39,7 @@ def test_rate_with_steady_values_and_reset(
    )
    insert_metrics(metrics)

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    query = build_builder_query(
        "A",
        metric_name,
@@ -72,6 +73,7 @@ def test_rate_with_steady_values_and_reset(

def test_rate_group_by_endpoint(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
) -> None:
@@ -87,7 +89,7 @@ def test_rate_group_by_endpoint(
    )
    insert_metrics(metrics)

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    query = build_builder_query(
        "A",
        metric_name,
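The two rate tests above exercise rate computation over cumulative counters, including a counter reset. Purely as a conceptual sketch (not the query engine's implementation, and none of these helper names come from the repository), a reset-aware per-interval rate can be expressed like this:

from typing import List, Tuple


def rate(samples: List[Tuple[float, float]]) -> List[float]:
    """Per-interval rate from (timestamp_seconds, counter_value) samples.
    A drop in the counter value is treated as a reset, so the delta is
    taken from zero instead of going negative."""
    rates = []
    for (t0, v0), (t1, v1) in zip(samples, samples[1:]):
        delta = v1 - v0 if v1 >= v0 else v1  # counter reset handling
        rates.append(delta / (t1 - t0))
    return rates


# Steady growth, then a reset between the third and fourth samples.
print(rate([(0, 10), (60, 70), (120, 130), (180, 20)]))
# [1.0, 1.0, 0.3333333333333333]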
@@ -5,7 +5,7 @@ from typing import Callable, Dict, List, Optional, Tuple
import requests

from fixtures import querier, types
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logs import Logs
from fixtures.metrics import Metrics
from fixtures.traces import TraceIdGenerator, Traces, TracesKind, TracesStatusCode
@@ -210,13 +210,14 @@ def build_metrics_query(

def test_logs_scalar_group_by_single_agg_no_order(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -237,13 +238,14 @@ def test_logs_scalar_group_by_single_agg_no_order(

def test_logs_scalar_group_by_single_agg_order_by_agg_asc(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -264,13 +266,14 @@ def test_logs_scalar_group_by_single_agg_order_by_agg_asc(

def test_logs_scalar_group_by_single_agg_order_by_agg_desc(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -291,13 +294,14 @@ def test_logs_scalar_group_by_single_agg_order_by_agg_desc(

def test_logs_scalar_group_by_single_agg_order_by_grouping_key_asc(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -322,13 +326,14 @@ def test_logs_scalar_group_by_single_agg_order_by_grouping_key_asc(

def test_logs_scalar_group_by_single_agg_order_by_grouping_key_desc(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -353,13 +358,14 @@ def test_logs_scalar_group_by_single_agg_order_by_grouping_key_desc(

def test_logs_scalar_group_by_multiple_aggs_order_by_first_agg_asc(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -384,13 +390,14 @@ def test_logs_scalar_group_by_multiple_aggs_order_by_first_agg_asc(

def test_logs_scalar_group_by_multiple_aggs_order_by_second_agg_desc(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -416,13 +423,14 @@ def test_logs_scalar_group_by_multiple_aggs_order_by_second_agg_desc(

def test_logs_scalar_group_by_single_agg_order_by_agg_asc_limit_2(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -447,13 +455,14 @@ def test_logs_scalar_group_by_single_agg_order_by_agg_asc_limit_2(

def test_logs_scalar_group_by_single_agg_order_by_agg_desc_limit_3(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -478,13 +487,14 @@ def test_logs_scalar_group_by_single_agg_order_by_agg_desc_limit_3(

def test_logs_scalar_group_by_order_by_grouping_key_asc_limit_2(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -509,13 +519,14 @@ def test_logs_scalar_group_by_order_by_grouping_key_asc_limit_2(

def test_traces_scalar_group_by_single_agg_no_order(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -536,13 +547,14 @@ def test_traces_scalar_group_by_single_agg_no_order(

def test_traces_scalar_group_by_single_agg_order_by_agg_asc(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -563,13 +575,14 @@ def test_traces_scalar_group_by_single_agg_order_by_agg_asc(

def test_traces_scalar_group_by_single_agg_order_by_agg_desc(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -590,13 +603,14 @@ def test_traces_scalar_group_by_single_agg_order_by_agg_desc(

def test_traces_scalar_group_by_single_agg_order_by_grouping_key_asc(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -621,13 +635,14 @@ def test_traces_scalar_group_by_single_agg_order_by_grouping_key_asc(

def test_traces_scalar_group_by_single_agg_order_by_grouping_key_desc(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -652,13 +667,14 @@ def test_traces_scalar_group_by_single_agg_order_by_grouping_key_desc(

def test_traces_scalar_group_by_multiple_aggs_order_by_first_agg_asc(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -683,13 +699,14 @@ def test_traces_scalar_group_by_multiple_aggs_order_by_first_agg_asc(

def test_traces_scalar_group_by_single_agg_order_by_agg_asc_limit_2(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -714,13 +731,14 @@ def test_traces_scalar_group_by_single_agg_order_by_agg_asc_limit_2(

def test_traces_scalar_group_by_single_agg_order_by_agg_desc_limit_3(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -745,13 +763,14 @@ def test_traces_scalar_group_by_single_agg_order_by_agg_desc_limit_3(

def test_traces_scalar_group_by_order_by_grouping_key_asc_limit_2(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -776,13 +795,14 @@ def test_traces_scalar_group_by_order_by_grouping_key_asc_limit_2(

def test_metrics_scalar_group_by_single_agg_no_order(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_metrics(generate_metrics_with_values(now, metric_values_for_test))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -808,13 +828,14 @@ def test_metrics_scalar_group_by_single_agg_no_order(

def test_metrics_scalar_group_by_single_agg_order_by_agg_asc(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_metrics(generate_metrics_with_values(now, metric_values_for_test))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -845,13 +866,14 @@ def test_metrics_scalar_group_by_single_agg_order_by_agg_asc(

def test_metrics_scalar_group_by_single_agg_order_by_grouping_key_asc(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_metrics(generate_metrics_with_values(now, metric_values_for_test))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -882,13 +904,14 @@ def test_metrics_scalar_group_by_single_agg_order_by_grouping_key_asc(

def test_metrics_scalar_group_by_single_agg_order_by_agg_asc_limit_2(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_metrics(generate_metrics_with_values(now, metric_values_for_test))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -915,13 +938,14 @@ def test_metrics_scalar_group_by_single_agg_order_by_agg_asc_limit_2(

def test_metrics_scalar_group_by_single_agg_order_by_agg_desc_limit_3(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_metrics(generate_metrics_with_values(now, metric_values_for_test))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
@@ -948,13 +972,14 @@ def test_metrics_scalar_group_by_single_agg_order_by_agg_desc_limit_3(

def test_metrics_scalar_group_by_order_by_grouping_key_asc_limit_2(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_metrics(generate_metrics_with_values(now, metric_values_for_test))

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = make_scalar_query_request(
        signoz,
        token,
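Every scalar group-by test above follows the same template: seed data with known per-service counts, request a grouped scalar aggregation, and assert on the ordering and row limit. The following plain-Python sketch only illustrates the group-by / order-by / limit semantics being asserted; it is not the querier implementation:

from collections import Counter
from typing import Dict, List, Tuple


def scalar_group_by(
    services: List[str], order_by_agg: bool = True, desc: bool = False, limit: int = 0
) -> List[Tuple[str, int]]:
    """count() grouped by service, ordered either by the aggregate value
    or by the grouping key, with an optional row limit."""
    counts: Dict[str, int] = Counter(services)
    key = (lambda kv: kv[1]) if order_by_agg else (lambda kv: kv[0])
    rows = sorted(counts.items(), key=key, reverse=desc)
    return rows[:limit] if limit else rows


rows = scalar_group_by(["redis", "api", "api", "worker", "api"], desc=True, limit=2)
print(rows)  # [('api', 3), ('redis', 1)]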
@@ -10,7 +10,7 @@ from typing import Callable, List
import pytest

from fixtures import types
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.metrics import Metrics
from fixtures.querier import (
    build_builder_query,
@@ -38,6 +38,7 @@ MULTI_TEMPORALITY_FILE_24h = get_testdata_file_path(
)
def test_with_steady_values_and_reset(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
    time_aggregation: str,
@@ -57,7 +58,7 @@ def test_with_steady_values_and_reset(
    )
    insert_metrics(metrics)

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    query = build_builder_query(
        "A",
        metric_name,
@@ -98,6 +99,7 @@ def test_with_steady_values_and_reset(
)
def test_group_by_endpoint(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
    time_aggregation: str,
@@ -120,7 +122,7 @@ def test_group_by_endpoint(
    )
    insert_metrics(metrics)

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    query = build_builder_query(
        "A",
        metric_name,
@@ -280,6 +282,7 @@ def test_group_by_endpoint(
)
def test_for_service_with_switch(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
    time_aggregation: str,
@@ -299,7 +302,7 @@ def test_for_service_with_switch(
    )
    insert_metrics(metrics)

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    query = build_builder_query(
        "A",
        metric_name,
@@ -336,6 +339,7 @@ def test_for_service_with_switch(
)
def test_for_week_long_time_range(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
    time_aggregation: str,
@@ -353,7 +357,7 @@ def test_for_week_long_time_range(
    )
    insert_metrics(metrics)

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    query = build_builder_query(
        "A",
        metric_name,
@@ -379,6 +383,7 @@ def test_for_week_long_time_range(
)
def test_for_month_long_time_range(
    signoz: types.SigNoz,
    create_user_admin: None, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
    time_aggregation: str,
@@ -396,7 +401,7 @@ def test_for_month_long_time_range(
    )
    insert_metrics(metrics)

    token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    query = build_builder_query(
        "A",
        metric_name,
@@ -5,17 +5,18 @@ import pytest
import requests
from sqlalchemy import sql

from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
from fixtures.types import SigNoz
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.types import Operation, SigNoz

ANONYMOUS_USER_ID = "00000000-0000-0000-0000-000000000000"


def test_managed_roles_create_on_register(
    signoz: SigNoz,
    create_user_admin: Operation, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
):
    admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # get the list of all roles.
    response = requests.get(
@@ -44,9 +45,10 @@ def test_managed_roles_create_on_register(
def test_root_user_signoz_admin_assignment(
    request: pytest.FixtureRequest,
    signoz: SigNoz,
    create_user_admin: Operation, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
):
    admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # Get the user from the /user/me endpoint and extract the id
    user_response = requests.get(
@@ -104,9 +106,10 @@ def test_root_user_signoz_admin_assignment(
def test_anonymous_user_signoz_anonymous_assignment(
    request: pytest.FixtureRequest,
    signoz: SigNoz,
    create_user_admin: Operation, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
):
    admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    response = requests.get(
        signoz.self.host_configs["8080"].get("/api/v1/roles"),
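Stripped of the fixture plumbing, the role-listing call these tests make is a single authenticated GET. A minimal standalone sketch — the base URL below is a placeholder for `signoz.self.host_configs["8080"]`, and the `data` envelope is assumed from the other hunks in this diff:

import requests

BASE_URL = "http://localhost:8080"  # placeholder for signoz.self.host_configs["8080"]
admin_token = "<jwt from get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)>"

response = requests.get(
    f"{BASE_URL}/api/v1/roles",
    headers={"Authorization": f"Bearer {admin_token}"},
    timeout=30,
)
response.raise_for_status()
roles = response.json().get("data", [])  # "data" envelope as used elsewhere in these tests
print(roles)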
@@ -5,17 +5,22 @@ import pytest
import requests
from sqlalchemy import sql

from fixtures.auth import USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD
from fixtures.signoz import ROOT_USER_EMAIL, ROOT_USER_PASSWORD
from fixtures.types import SigNoz
from fixtures.auth import (
    USER_ADMIN_EMAIL,
    USER_ADMIN_PASSWORD,
    USER_EDITOR_EMAIL,
    USER_EDITOR_PASSWORD,
)
from fixtures.types import Operation, SigNoz


def test_user_invite_accept_role_grant(
    request: pytest.FixtureRequest,
    signoz: SigNoz,
    create_user_admin: Operation, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
):
    admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # invite a user as editor
    invite_payload = {
@@ -95,6 +100,7 @@ def test_user_invite_accept_role_grant(
def test_user_update_role_grant(
    request: pytest.FixtureRequest,
    signoz: SigNoz,
    create_user_admin: Operation, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
):
    # Get the editor user's id
@@ -108,7 +114,7 @@ def test_user_update_role_grant(
    editor_id = user_me_response.json()["data"]["id"]

    # Get the role id for viewer
    admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    roles_response = requests.get(
        signoz.self.host_configs["8080"].get("/api/v1/roles"),
        headers={"Authorization": f"Bearer {admin_token}"},
@@ -171,6 +177,7 @@ def test_user_update_role_grant(
def test_user_delete_role_revoke(
    request: pytest.FixtureRequest,
    signoz: SigNoz,
    create_user_admin: Operation, # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
):
    # login with editor to get the user_id and check if user exists
@@ -184,7 +191,7 @@ def test_user_delete_role_revoke(
    editor_id = user_me_response.json()["data"]["id"]

    # delete the editor user
    admin_token = get_token(ROOT_USER_EMAIL, ROOT_USER_PASSWORD)
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    delete_response = requests.delete(
        signoz.self.host_configs["8080"].get(f"/api/v1/user/{editor_id}"),
        headers={"Authorization": f"Bearer {admin_token}"},
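Similarly, `test_user_delete_role_revoke` reduces to an authenticated DELETE against the user endpoint shown in the last hunk. A hedged standalone sketch with placeholder base URL and IDs; the path and header come straight from the diff:

import requests

BASE_URL = "http://localhost:8080"  # placeholder for signoz.self.host_configs["8080"]
admin_token = "<jwt from get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)>"
editor_id = "<id read from /api/v1/user/me while logged in as the editor, per the hunk above>"

delete_response = requests.delete(
    f"{BASE_URL}/api/v1/user/{editor_id}",
    headers={"Authorization": f"Bearer {admin_token}"},
    timeout=30,
)
print(delete_response.status_code)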
Some files were not shown because too many files have changed in this diff.