Compare commits

...

9 Commits

Author SHA1 Message Date
amlannandy
10d198f305 chore: reset threshold unit whenever base y axis unit changes 2026-01-21 15:23:25 +07:00
Pandey
3051d442c0 fix: move ee references out of cmd/community (#10063)
- move ee references out of cmd/community
- add check in commitci
2026-01-21 09:22:40 +05:30
Karan Balani
ea15ce4e04 feat: sso stats reporting (#10062)
2026-01-20 18:57:35 +00:00
Ashwin Bhatkal
865a7a5a31 fix: promql and clickhouse query based panels refresh on dynamic variable change (#10060)
* fix: promql and clickhouse query based panels refresh on dynamic variable change

* chore: add test
2026-01-20 17:19:58 +00:00
swapnil-signoz
de4ca50a40 refactor: using global config's ingestion URL (#10019)
* refactor: using global config's ingestion URL

* refactor: add global ingestion URL to configuration

---------

Co-authored-by: Vikrant Gupta <vikrant@signoz.io>
2026-01-20 17:05:56 +00:00
Amlan Kumar Nandy
8cabaafc58 fix: handle threshold unit scaling issues (#10020) 2026-01-20 16:22:49 +00:00
Ashwin Bhatkal
e9d66b8094 chore: update yarn.lock (#10051) 2026-01-20 16:07:05 +00:00
Karan Balani
26d3d6b1e4 feat: gateway apis (#10010) 2026-01-20 15:46:46 +00:00
Ashwin Bhatkal
36d6debeab chore: update .gitignore with only settings.json of .vscode folder (#10058) 2026-01-20 13:54:58 +00:00
36 changed files with 2091 additions and 144 deletions

View File

@@ -25,3 +25,10 @@ jobs:
else
echo "No references to 'ee' packages found in 'pkg' directory"
fi
if grep -R --include="*.go" '.*/ee/.*' cmd/community/; then
echo "Error: Found references to 'ee' packages in 'cmd/community' directory"
exit 1
else
echo "No references to 'ee' packages found in 'cmd/community' directory"
fi

.gitignore (vendored)
View File

@@ -1,6 +1,9 @@
node_modules
.vscode
!.vscode/settings.json
deploy/docker/environment_tiny/common_test
frontend/node_modules
frontend/.pnp
@@ -104,7 +107,6 @@ dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/

View File

@@ -5,13 +5,14 @@ import (
"log/slog"
"github.com/SigNoz/signoz/cmd"
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/openfgaauthz"
"github.com/SigNoz/signoz/pkg/authz/openfgaschema"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/gateway"
"github.com/SigNoz/signoz/pkg/gateway/noopgateway"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
@@ -24,7 +25,6 @@ import (
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/sqlschema"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/version"
"github.com/SigNoz/signoz/pkg/zeus"
@@ -57,13 +57,6 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
// print the version
version.Info.PrettyPrint(config.Version)
// add enterprise sqlstore factories to the community sqlstore factories
sqlstoreFactories := signoz.NewSQLStoreProviderFactories()
if err := sqlstoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory())); err != nil {
logger.ErrorContext(ctx, "failed to add postgressqlstore factory", "error", err)
return err
}
signoz, err := signoz.New(
ctx,
config,
@@ -90,6 +83,9 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, _ role.Module, queryParser queryparser.QueryParser, _ querier.Querier, _ licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(impldashboard.NewStore(store), settings, analytics, orgGetter, queryParser)
},
func(_ licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return noopgateway.NewProviderFactory()
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", "error", err)

View File

@@ -10,6 +10,7 @@ import (
"github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
"github.com/SigNoz/signoz/ee/gateway/httpgateway"
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
"github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard"
@@ -22,6 +23,7 @@ import (
"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/gateway"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
@@ -120,6 +122,9 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, role role.Module, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(pkgimpldashboard.NewStore(store), settings, analytics, orgGetter, role, queryParser, querier, licensing)
},
func(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return httpgateway.NewProviderFactory(licensing)
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", "error", err)

View File

@@ -2067,6 +2067,361 @@ paths:
summary: Get features
tags:
- features
/api/v2/gateway/ingestion_keys:
get:
deprecated: false
description: This endpoint returns the ingestion keys for a workspace
operationId: GetIngestionKeys
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/GatewaytypesGettableIngestionKeys'
status:
type: string
type: object
description: OK
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- ADMIN
- tokenizer:
- ADMIN
summary: Get ingestion keys for workspace
tags:
- gateway
post:
deprecated: false
description: This endpoint creates an ingestion key for the workspace
operationId: CreateIngestionKey
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/GatewaytypesPostableIngestionKey'
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/GatewaytypesGettableCreatedIngestionKey'
status:
type: string
type: object
description: OK
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- ADMIN
- tokenizer:
- ADMIN
summary: Create ingestion key for workspace
tags:
- gateway
/api/v2/gateway/ingestion_keys/{keyId}:
delete:
deprecated: false
description: This endpoint deletes an ingestion key for the workspace
operationId: DeleteIngestionKey
parameters:
- in: path
name: keyId
required: true
schema:
type: string
responses:
"204":
description: No Content
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- ADMIN
- tokenizer:
- ADMIN
summary: Delete ingestion key for workspace
tags:
- gateway
patch:
deprecated: false
description: This endpoint updates an ingestion key for the workspace
operationId: UpdateIngestionKey
parameters:
- in: path
name: keyId
required: true
schema:
type: string
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/GatewaytypesPostableIngestionKey'
responses:
"204":
description: No Content
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- ADMIN
- tokenizer:
- ADMIN
summary: Update ingestion key for workspace
tags:
- gateway
/api/v2/gateway/ingestion_keys/{keyId}/limits:
post:
deprecated: false
description: This endpoint creates an ingestion key limit
operationId: CreateIngestionKeyLimit
parameters:
- in: path
name: keyId
required: true
schema:
type: string
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/GatewaytypesPostableIngestionKeyLimit'
responses:
"201":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/GatewaytypesGettableCreatedIngestionKeyLimit'
status:
type: string
type: object
description: Created
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- ADMIN
- tokenizer:
- ADMIN
summary: Create limit for the ingestion key
tags:
- gateway
/api/v2/gateway/ingestion_keys/limits/{limitId}:
delete:
deprecated: false
description: This endpoint deletes an ingestion key limit
operationId: DeleteIngestionKeyLimit
parameters:
- in: path
name: limitId
required: true
schema:
type: string
responses:
"204":
description: No Content
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- ADMIN
- tokenizer:
- ADMIN
summary: Delete limit for the ingestion key
tags:
- gateway
patch:
deprecated: false
description: This endpoint updates an ingestion key limit
operationId: UpdateIngestionKeyLimit
parameters:
- in: path
name: limitId
required: true
schema:
type: string
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/GatewaytypesUpdatableIngestionKeyLimit'
responses:
"204":
description: No Content
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- ADMIN
- tokenizer:
- ADMIN
summary: Update limit for the ingestion key
tags:
- gateway
/api/v2/gateway/ingestion_keys/search:
get:
deprecated: false
description: This endpoint returns the ingestion keys for a workspace
operationId: SearchIngestionKeys
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/GatewaytypesGettableIngestionKeys'
status:
type: string
type: object
description: OK
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- ADMIN
- tokenizer:
- ADMIN
summary: Search ingestion keys for workspace
tags:
- gateway
/api/v2/metric/alerts:
get:
deprecated: false
@@ -3051,6 +3406,160 @@ components:
nullable: true
type: object
type: object
GatewaytypesGettableCreatedIngestionKey:
properties:
id:
type: string
value:
type: string
type: object
GatewaytypesGettableCreatedIngestionKeyLimit:
properties:
id:
type: string
type: object
GatewaytypesGettableIngestionKeys:
properties:
_pagination:
$ref: '#/components/schemas/GatewaytypesPagination'
keys:
items:
$ref: '#/components/schemas/GatewaytypesIngestionKey'
nullable: true
type: array
type: object
GatewaytypesIngestionKey:
properties:
created_at:
format: date-time
type: string
expires_at:
format: date-time
type: string
id:
type: string
limits:
items:
$ref: '#/components/schemas/GatewaytypesLimit'
nullable: true
type: array
name:
type: string
tags:
items:
type: string
nullable: true
type: array
updated_at:
format: date-time
type: string
value:
type: string
workspace_id:
type: string
type: object
GatewaytypesLimit:
properties:
config:
$ref: '#/components/schemas/GatewaytypesLimitConfig'
created_at:
format: date-time
type: string
id:
type: string
key_id:
type: string
metric:
$ref: '#/components/schemas/GatewaytypesLimitMetric'
signal:
type: string
tags:
items:
type: string
nullable: true
type: array
updated_at:
format: date-time
type: string
type: object
GatewaytypesLimitConfig:
properties:
day:
$ref: '#/components/schemas/GatewaytypesLimitValue'
second:
$ref: '#/components/schemas/GatewaytypesLimitValue'
type: object
GatewaytypesLimitMetric:
properties:
day:
$ref: '#/components/schemas/GatewaytypesLimitMetricValue'
second:
$ref: '#/components/schemas/GatewaytypesLimitMetricValue'
type: object
GatewaytypesLimitMetricValue:
properties:
count:
format: int64
type: integer
size:
format: int64
type: integer
type: object
GatewaytypesLimitValue:
properties:
count:
format: int64
type: integer
size:
format: int64
type: integer
type: object
GatewaytypesPagination:
properties:
page:
type: integer
pages:
type: integer
per_page:
type: integer
total:
type: integer
type: object
GatewaytypesPostableIngestionKey:
properties:
expires_at:
format: date-time
type: string
name:
type: string
tags:
items:
type: string
nullable: true
type: array
type: object
GatewaytypesPostableIngestionKeyLimit:
properties:
config:
$ref: '#/components/schemas/GatewaytypesLimitConfig'
signal:
type: string
tags:
items:
type: string
nullable: true
type: array
type: object
GatewaytypesUpdatableIngestionKeyLimit:
properties:
config:
$ref: '#/components/schemas/GatewaytypesLimitConfig'
tags:
items:
type: string
nullable: true
type: array
type: object
MetricsexplorertypesMetricAlert:
properties:
alertId:

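For orientation, here is a minimal Go sketch of calling the new ingestion-key listing endpoint documented above. The base URL, the placeholder key value, and the `SIGNOZ-API-KEY` header name are assumptions for illustration only — the spec above declares `api_key` and `tokenizer` security schemes with ADMIN scope but does not name the header.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Hypothetical SigNoz base URL; adjust to your deployment.
	const baseURL = "https://signoz.example.com"

	req, err := http.NewRequest(http.MethodGet, baseURL+"/api/v2/gateway/ingestion_keys", nil)
	if err != nil {
		panic(err)
	}
	// Header name assumed from the api_key security scheme; use an admin-scoped key.
	req.Header.Set("SIGNOZ-API-KEY", "<admin-api-key>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		panic(err)
	}

	// Per the schema above, a 200 response carries a status field and a data
	// object shaped like GatewaytypesGettableIngestionKeys (keys + _pagination).
	fmt.Println(resp.Status)
	fmt.Println(string(body))
}
```

Error responses (401, 403, 500) follow the RenderErrorResponse schema referenced in the paths above.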
View File

@@ -0,0 +1,282 @@
package httpgateway
import (
"bytes"
"context"
"encoding/json"
"io"
"net/http"
"net/url"
"strconv"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/gateway"
"github.com/SigNoz/signoz/pkg/http/client"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/types/gatewaytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/tidwall/gjson"
)
type Provider struct {
settings factory.ScopedProviderSettings
config gateway.Config
httpClient *client.Client
licensing licensing.Licensing
}
func NewProviderFactory(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return factory.NewProviderFactory(factory.MustNewName("http"), func(ctx context.Context, ps factory.ProviderSettings, c gateway.Config) (gateway.Gateway, error) {
return New(ctx, ps, c, licensing)
})
}
func New(ctx context.Context, providerSettings factory.ProviderSettings, config gateway.Config, licensing licensing.Licensing) (gateway.Gateway, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/ee/gateway/httpgateway")
httpClient, err := client.New(
settings.Logger(),
providerSettings.TracerProvider,
providerSettings.MeterProvider,
client.WithRequestResponseLog(true),
client.WithRetryCount(3),
)
if err != nil {
return nil, err
}
return &Provider{
settings: settings,
config: config,
httpClient: httpClient,
licensing: licensing,
}, nil
}
func (provider *Provider) GetIngestionKeys(ctx context.Context, orgID valuer.UUID, page, perPage int) (*gatewaytypes.GettableIngestionKeys, error) {
qParams := url.Values{}
qParams.Add("page", strconv.Itoa(page))
qParams.Add("per_page", strconv.Itoa(perPage))
responseBody, err := provider.do(ctx, orgID, http.MethodGet, "/v1/workspaces/me/keys", qParams, nil)
if err != nil {
return nil, err
}
var ingestionKeys []gatewaytypes.IngestionKey
if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "data").String()), &ingestionKeys); err != nil {
return nil, err
}
var pagination gatewaytypes.Pagination
if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "_pagination").String()), &pagination); err != nil {
return nil, err
}
return &gatewaytypes.GettableIngestionKeys{
Keys: ingestionKeys,
Pagination: pagination,
}, nil
}
func (provider *Provider) SearchIngestionKeysByName(ctx context.Context, orgID valuer.UUID, name string, page, perPage int) (*gatewaytypes.GettableIngestionKeys, error) {
qParams := url.Values{}
qParams.Add("name", name)
qParams.Add("page", strconv.Itoa(page))
qParams.Add("per_page", strconv.Itoa(perPage))
responseBody, err := provider.do(ctx, orgID, http.MethodGet, "/v1/workspaces/me/keys/search", qParams, nil)
if err != nil {
return nil, err
}
var ingestionKeys []gatewaytypes.IngestionKey
if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "data").String()), &ingestionKeys); err != nil {
return nil, err
}
var pagination gatewaytypes.Pagination
if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "_pagination").String()), &pagination); err != nil {
return nil, err
}
return &gatewaytypes.GettableIngestionKeys{
Keys: ingestionKeys,
Pagination: pagination,
}, nil
}
func (provider *Provider) CreateIngestionKey(ctx context.Context, orgID valuer.UUID, name string, tags []string, expiresAt time.Time) (*gatewaytypes.GettableCreatedIngestionKey, error) {
requestBody := gatewaytypes.PostableIngestionKey{
Name: name,
Tags: tags,
ExpiresAt: expiresAt,
}
requestBodyBytes, err := json.Marshal(requestBody)
if err != nil {
return nil, err
}
responseBody, err := provider.do(ctx, orgID, http.MethodPost, "/v1/workspaces/me/keys", nil, requestBodyBytes)
if err != nil {
return nil, err
}
var createdKeyResponse gatewaytypes.GettableCreatedIngestionKey
if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "data").String()), &createdKeyResponse); err != nil {
return nil, err
}
return &createdKeyResponse, nil
}
func (provider *Provider) UpdateIngestionKey(ctx context.Context, orgID valuer.UUID, keyID string, name string, tags []string, expiresAt time.Time) error {
requestBody := gatewaytypes.PostableIngestionKey{
Name: name,
Tags: tags,
ExpiresAt: expiresAt,
}
requestBodyBytes, err := json.Marshal(requestBody)
if err != nil {
return err
}
_, err = provider.do(ctx, orgID, http.MethodPatch, "/v1/workspaces/me/keys/"+keyID, nil, requestBodyBytes)
if err != nil {
return err
}
return nil
}
func (provider *Provider) DeleteIngestionKey(ctx context.Context, orgID valuer.UUID, keyID string) error {
_, err := provider.do(ctx, orgID, http.MethodDelete, "/v1/workspaces/me/keys/"+keyID, nil, nil)
if err != nil {
return err
}
return nil
}
func (provider *Provider) CreateIngestionKeyLimit(ctx context.Context, orgID valuer.UUID, keyID string, signal string, limitConfig gatewaytypes.LimitConfig, tags []string) (*gatewaytypes.GettableCreatedIngestionKeyLimit, error) {
requestBody := gatewaytypes.PostableIngestionKeyLimit{
Signal: signal,
Config: limitConfig,
Tags: tags,
}
requestBodyBytes, err := json.Marshal(requestBody)
if err != nil {
return nil, err
}
responseBody, err := provider.do(ctx, orgID, http.MethodPost, "/v1/workspaces/me/keys/"+keyID+"/limits", nil, requestBodyBytes)
if err != nil {
return nil, err
}
var createdIngestionKeyLimitResponse gatewaytypes.GettableCreatedIngestionKeyLimit
if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "data").String()), &createdIngestionKeyLimitResponse); err != nil {
return nil, err
}
return &createdIngestionKeyLimitResponse, nil
}
func (provider *Provider) UpdateIngestionKeyLimit(ctx context.Context, orgID valuer.UUID, limitID string, limitConfig gatewaytypes.LimitConfig, tags []string) error {
requestBody := gatewaytypes.UpdatableIngestionKeyLimit{
Config: limitConfig,
Tags: tags,
}
requestBodyBytes, err := json.Marshal(requestBody)
if err != nil {
return err
}
_, err = provider.do(ctx, orgID, http.MethodPatch, "/v1/workspaces/me/limits/"+limitID, nil, requestBodyBytes)
if err != nil {
return err
}
return nil
}
func (provider *Provider) DeleteIngestionKeyLimit(ctx context.Context, orgID valuer.UUID, limitID string) error {
_, err := provider.do(ctx, orgID, http.MethodDelete, "/v1/workspaces/me/limits/"+limitID, nil, nil)
if err != nil {
return err
}
return nil
}
func (provider *Provider) do(ctx context.Context, orgID valuer.UUID, method string, path string, queryParams url.Values, body []byte) ([]byte, error) {
license, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "no valid license found").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
// build url
requestURL := provider.config.URL.JoinPath(path)
// add query params to the url
if queryParams != nil {
requestURL.RawQuery = queryParams.Encode()
}
// build request
request, err := http.NewRequestWithContext(ctx, method, requestURL.String(), bytes.NewBuffer(body))
if err != nil {
return nil, err
}
// add headers needed to call gateway
request.Header.Set("Content-Type", "application/json")
request.Header.Set("X-Signoz-Cloud-Api-Key", license.Key)
request.Header.Set("X-Consumer-Username", "lid:00000000-0000-0000-0000-000000000000")
request.Header.Set("X-Consumer-Groups", "ns:default")
// execute request
response, err := provider.httpClient.Do(request)
if err != nil {
return nil, err
}
// read response
defer response.Body.Close()
responseBody, err := io.ReadAll(response.Body)
if err != nil {
return nil, err
}
// only 2XX
if response.StatusCode/100 == 2 {
return responseBody, nil
}
errorMessage := gjson.GetBytes(responseBody, "error").String()
if errorMessage == "" {
errorMessage = "an unknown error occurred"
}
// return error for non 2XX
return nil, provider.errFromStatusCode(response.StatusCode, errorMessage)
}
func (provider *Provider) errFromStatusCode(code int, errorMessage string) error {
switch code {
case http.StatusBadRequest:
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, errorMessage)
case http.StatusUnauthorized:
return errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, errorMessage)
case http.StatusForbidden:
return errors.New(errors.TypeForbidden, errors.CodeForbidden, errorMessage)
case http.StatusNotFound:
return errors.New(errors.TypeNotFound, errors.CodeNotFound, errorMessage)
case http.StatusConflict:
return errors.New(errors.TypeAlreadyExists, errors.CodeAlreadyExists, errorMessage)
}
return errors.New(errors.TypeInternal, errors.CodeInternal, errorMessage)
}
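Since `provider.do` returns the raw gateway body and the callers unwrap the `data` / `_pagination` envelope with gjson before unmarshalling, here is a self-contained sketch of that unwrap-then-decode step in isolation. The Pagination field names follow the GatewaytypesPagination schema above; the sample payload and key fields are illustrative.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/tidwall/gjson"
)

// Pagination mirrors the GatewaytypesPagination schema from the spec above.
type Pagination struct {
	Page    int `json:"page"`
	Pages   int `json:"pages"`
	PerPage int `json:"per_page"`
	Total   int `json:"total"`
}

func main() {
	// Hypothetical gateway response using the data/_pagination envelope that
	// GetIngestionKeys and SearchIngestionKeysByName unwrap.
	body := []byte(`{"data":[{"id":"key-1","name":"default"}],"_pagination":{"page":1,"pages":1,"per_page":50,"total":1}}`)

	// gjson extracts the raw JSON of each envelope field, which is then
	// decoded with the standard library, matching the pattern in the provider.
	var keys []map[string]any
	if err := json.Unmarshal([]byte(gjson.GetBytes(body, "data").String()), &keys); err != nil {
		panic(err)
	}

	var pagination Pagination
	if err := json.Unmarshal([]byte(gjson.GetBytes(body, "_pagination").String()), &pagination); err != nil {
		panic(err)
	}

	fmt.Printf("keys=%d total=%d\n", len(keys), pagination.Total) // keys=1 total=1
}
```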

View File

@@ -10,6 +10,7 @@ import (
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/middleware"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
@@ -36,6 +37,7 @@ type APIHandlerOptions struct {
GatewayUrl string
// Querier Influx Interval
FluxInterval time.Duration
GlobalConfig global.Config
}
type APIHandler struct {

View File

@@ -76,7 +76,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
return
}
ingestionUrl, signozApiUrl, apiErr := ah.getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
signozApiUrl, apiErr := ah.getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't deduce ingestion url and signoz api url",
@@ -84,7 +84,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
return
}
result.IngestionUrl = ingestionUrl
result.IngestionUrl = ah.opts.GlobalConfig.IngestionURL.String()
result.SigNozAPIUrl = signozApiUrl
gatewayUrl := ah.opts.GatewayUrl
@@ -186,7 +186,7 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
}
func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
string, string, *basemodel.ApiError,
string, *basemodel.ApiError,
) {
// TODO: remove this struct from here
type deploymentResponse struct {
@@ -200,7 +200,7 @@ func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licens
respBytes, err := ah.Signoz.Zeus.GetDeployment(ctx, licenseKey)
if err != nil {
return "", "", basemodel.InternalError(fmt.Errorf(
return "", basemodel.InternalError(fmt.Errorf(
"couldn't query for deployment info: error: %w", err,
))
}
@@ -209,7 +209,7 @@ func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licens
err = json.Unmarshal(respBytes, resp)
if err != nil {
return "", "", basemodel.InternalError(fmt.Errorf(
return "", basemodel.InternalError(fmt.Errorf(
"couldn't unmarshal deployment info response: error: %w", err,
))
}
@@ -219,16 +219,14 @@ func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licens
if len(regionDns) < 1 || len(deploymentName) < 1 {
// Fail early if actual response structure and expectation here ever diverge
return "", "", basemodel.InternalError(fmt.Errorf(
return "", basemodel.InternalError(fmt.Errorf(
"deployment info response not in expected shape. couldn't determine region dns and deployment name",
))
}
ingestionUrl := fmt.Sprintf("https://ingest.%s", regionDns)
signozApiUrl := fmt.Sprintf("https://%s.%s", deploymentName, regionDns)
return ingestionUrl, signozApiUrl, nil
return signozApiUrl, nil
}
type ingestionKey struct {

View File

@@ -172,6 +172,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
FluxInterval: config.Querier.FluxInterval,
Gateway: gatewayProxy,
GatewayUrl: config.Gateway.URL.String(),
GlobalConfig: config.Global,
}
apiHandler, err := api.NewAPIHandler(apiOpts, signoz)

View File

@@ -37,10 +37,7 @@ function ThresholdItem({
);
if (units.length === 0) {
component = (
<Tooltip
trigger="hover"
title="Please select a Y-axis unit for the query first"
>
<Tooltip trigger="hover" title="No compatible units available">
<Select
placeholder="Unit"
value={threshold.unit ? threshold.unit : null}

View File

@@ -47,9 +47,17 @@ export function getCategoryByOptionId(id: string): string | undefined {
}
export function getCategorySelectOptionByName(
name: string,
name: string | undefined,
): DefaultOptionType[] {
if (!name) {
return [];
}
const categories = getYAxisCategories(YAxisSource.ALERTS);
if (!categories.length) {
return [];
}
return (
categories
.find((category) => category.name === name)

View File

@@ -1,12 +1,15 @@
import YAxisUnitSelector from 'components/YAxisUnitSelector';
import { YAxisSource } from 'components/YAxisUnitSelector/types';
import {
UniversalYAxisUnit,
YAxisSource,
} from 'components/YAxisUnitSelector/types';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { useCreateAlertState } from 'container/CreateAlertV2/context';
import ChartPreviewComponent from 'container/FormAlertRules/ChartPreview';
import PlotTag from 'container/NewWidget/LeftContainer/WidgetGraph/PlotTag';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import useGetYAxisUnit from 'hooks/useGetYAxisUnit';
import { useEffect, useState } from 'react';
import { useCallback, useEffect, useState } from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { AlertTypes } from 'types/api/alerts/alertTypes';
@@ -26,6 +29,7 @@ function ChartPreview({ alertDef }: ChartPreviewProps): JSX.Element {
alertState,
setAlertState,
isEditMode,
setThresholdState,
} = useCreateAlertState();
const { selectedTime: globalSelectedInterval } = useSelector<
AppState,
@@ -51,6 +55,18 @@ function ChartPreview({ alertDef }: ChartPreviewProps): JSX.Element {
}
}, [initialYAxisUnit, setAlertState, shouldUpdateYAxisUnit]);
const handleYAxisUnitChange = useCallback(
(value: UniversalYAxisUnit): void => {
setAlertState({ type: 'SET_Y_AXIS_UNIT', payload: value });
// Reset all threshold units when the y-axis unit changes
setThresholdState({
type: 'SET_THRESHOLDS',
payload: thresholdState.thresholds.map((t) => ({ ...t, unit: '' })),
});
},
[setAlertState, setThresholdState, thresholdState.thresholds],
);
const headline = (
<div className="chart-preview-headline">
<PlotTag
@@ -60,9 +76,7 @@ function ChartPreview({ alertDef }: ChartPreviewProps): JSX.Element {
<YAxisUnitSelector
value={yAxisUnit}
initialValue={initialYAxisUnit}
onChange={(value): void => {
setAlertState({ type: 'SET_Y_AXIS_UNIT', payload: value });
}}
onChange={handleYAxisUnitChange}
source={YAxisSource.ALERTS}
loading={isLoading}
/>

View File

@@ -4,3 +4,7 @@
gap: 8px;
align-items: center;
}
.rule-unit-selector {
width: 150px;
}

View File

@@ -15,8 +15,7 @@ import { DefaultOptionType } from 'antd/es/select';
import {
getCategoryByOptionId,
getCategorySelectOptionByName,
} from 'container/NewWidget/RightContainer/alertFomatCategories';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
} from 'container/CreateAlertV2/AlertCondition/utils';
import { useTranslation } from 'react-i18next';
import {
AlertDef,
@@ -43,10 +42,10 @@ function RuleOptions({
setAlertDef,
queryCategory,
queryOptions,
yAxisUnit,
}: RuleOptionsProps): JSX.Element {
// init namespace for translations
const { t } = useTranslation('alerts');
const { currentQuery } = useQueryBuilder();
const { ruleType } = alertDef;
@@ -365,11 +364,9 @@ function RuleOptions({
</InlineSelect>
);
const selectedCategory = getCategoryByOptionId(currentQuery?.unit || '');
const selectedCategory = getCategoryByOptionId(yAxisUnit);
const categorySelectOptions = getCategorySelectOptionByName(
selectedCategory?.name,
);
const categorySelectOptions = getCategorySelectOptionByName(selectedCategory);
const step3Label = alertDef.alertType === 'METRIC_BASED_ALERT' ? '3' : '2';
@@ -402,6 +399,7 @@ function RuleOptions({
<Form.Item noStyle>
<Select
className="rule-unit-selector"
getPopupContainer={popupContainer}
allowClear
showSearch
@@ -515,5 +513,6 @@ interface RuleOptionsProps {
setAlertDef: (a: AlertDef) => void;
queryCategory: EQueryType;
queryOptions: DefaultOptionType[];
yAxisUnit: string;
}
export default RuleOptions;

View File

@@ -914,6 +914,7 @@ function FormAlertRules({
alertDef={alertDef}
setAlertDef={setAlertDef}
queryOptions={queryOptions}
yAxisUnit={yAxisUnit || ''}
/>
{renderBasicInfo()}

View File

@@ -0,0 +1,183 @@
/* eslint-disable sonarjs/no-duplicate-string */
import {
initialClickHouseData,
initialQueryBuilderFormValuesMap,
initialQueryPromQLData,
PANEL_TYPES,
} from 'constants/queryBuilder';
import { IDashboardVariable, Widgets } from 'types/api/dashboard/getAll';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { EQueryType } from 'types/common/dashboard';
import { DataSource } from 'types/common/queryBuilder';
import { createDynamicVariableToWidgetsMap } from './utils';
const createMockDynamicVariable = (
overrides: Partial<IDashboardVariable> = {},
): IDashboardVariable => ({
id: 'var-1',
name: 'testVar',
description: '',
type: 'DYNAMIC',
sort: 'DISABLED',
multiSelect: false,
showALLOption: false,
dynamicVariablesAttribute: 'service.name',
...overrides,
});
const createBaseWidget = (id: string, query: Query): Widgets => ({
id,
title: 'Test Widget',
description: '',
panelTypes: PANEL_TYPES.TIME_SERIES,
opacity: '1',
nullZeroValues: '',
timePreferance: 'GLOBAL_TIME',
softMin: null,
softMax: null,
selectedLogFields: null,
selectedTracesFields: null,
query,
});
const createMockPromQLWidget = (
id: string,
queries: {
query: string;
name?: string;
legend?: string;
disabled?: boolean;
}[],
): Widgets => {
const promqlQueries = queries.map((q) => ({
...initialQueryPromQLData,
query: q.query,
name: q.name || 'A',
legend: q.legend || '',
disabled: q.disabled ?? false,
}));
const query: Query = {
queryType: EQueryType.PROM,
promql: promqlQueries,
builder: {
queryData: [],
queryFormulas: [],
queryTraceOperator: [],
},
clickhouse_sql: [],
id: 'query-1',
};
return createBaseWidget(id, query);
};
const createMockClickHouseWidget = (
id: string,
queries: {
query: string;
name?: string;
legend?: string;
disabled?: boolean;
}[],
): Widgets => {
const clickhouseQueries = queries.map((q) => ({
...initialClickHouseData,
query: q.query,
name: q.name || 'A',
legend: q.legend || '',
disabled: q.disabled ?? false,
}));
const query: Query = {
queryType: EQueryType.CLICKHOUSE,
promql: [],
builder: {
queryData: [],
queryFormulas: [],
queryTraceOperator: [],
},
clickhouse_sql: clickhouseQueries,
id: 'query-1',
};
return createBaseWidget(id, query);
};
const createMockQueryBuilderWidget = (
id: string,
filters: { key: string; value: string | string[]; op?: string }[],
): Widgets => {
const queryData = {
...initialQueryBuilderFormValuesMap[DataSource.LOGS],
queryName: 'A',
filters: {
items: filters.map((f, index) => ({
id: `filter-${index}`,
key: { key: f.key, dataType: DataTypes.String, type: '', id: f.key },
op: f.op || '=',
value: f.value,
})),
op: 'AND',
},
};
const query: Query = {
queryType: EQueryType.QUERY_BUILDER,
promql: [],
builder: {
queryData: [queryData],
queryFormulas: [],
queryTraceOperator: [],
},
clickhouse_sql: [],
id: 'query-1',
};
return createBaseWidget(id, query);
};
describe('createDynamicVariableToWidgetsMap', () => {
it('should handle widgets with different query types', () => {
const dynamicVariables = [
createMockDynamicVariable({
id: 'var-1',
name: 'service.name123',
dynamicVariablesAttribute: 'service.name',
}),
];
const widgets = [
createMockPromQLWidget('widget-promql-pass', [
{ query: 'up{service="$service.name123"}' },
]),
createMockPromQLWidget('widget-promql-fail', [
{ query: 'up{service="$service.name"}' },
]),
createMockClickHouseWidget('widget-clickhouse-pass', [
{ query: "SELECT * FROM logs WHERE service_name = '$service.name123'" },
]),
createMockClickHouseWidget('widget-clickhouse-fail', [
{ query: "SELECT * FROM logs WHERE service_name = '$service.name'" },
]),
createMockQueryBuilderWidget('widget-builder-pass', [
{ key: 'service.name', value: '$service.name123' },
]),
createMockQueryBuilderWidget('widget-builder-fail', [
{ key: 'service.name', value: '$service.name' },
]),
];
const result = createDynamicVariableToWidgetsMap(dynamicVariables, widgets);
expect(result['var-1']).toContain('widget-promql-pass');
expect(result['var-1']).toContain('widget-clickhouse-pass');
expect(result['var-1']).toContain('widget-builder-pass');
expect(result['var-1']).not.toContain('widget-promql-fail');
expect(result['var-1']).not.toContain('widget-clickhouse-fail');
expect(result['var-1']).not.toContain('widget-builder-fail');
});
});

View File

@@ -104,10 +104,9 @@ export const createDynamicVariableToWidgetsMap = (
// Check each widget for usage of dynamic variables
if (Array.isArray(widgets)) {
widgets.forEach((widget) => {
if (
widget.query?.builder?.queryData &&
widget.query?.queryType === EQueryType.QUERY_BUILDER
) {
if (widget.query?.queryType === EQueryType.QUERY_BUILDER) {
if (!Array.isArray(widget.query.builder.queryData)) return;
widget.query.builder.queryData.forEach((queryData: IBuilderQuery) => {
// Check filter items for dynamic variables
queryData.filters?.items?.forEach((filter: TagFilterItem) => {
@@ -139,6 +138,34 @@ export const createDynamicVariableToWidgetsMap = (
});
}
});
} else if (widget.query?.queryType === EQueryType.PROM) {
if (!Array.isArray(widget.query.promql)) return;
widget.query.promql.forEach((promqlQuery) => {
dynamicVariables.forEach((variable) => {
if (
variable.dynamicVariablesAttribute &&
promqlQuery.query?.includes(`$${variable.name}`) &&
!dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
) {
dynamicVariableToWidgetsMap[variable.id].push(widget.id);
}
});
});
} else if (widget.query?.queryType === EQueryType.CLICKHOUSE) {
if (!Array.isArray(widget.query.clickhouse_sql)) return;
widget.query.clickhouse_sql.forEach((clickhouseQuery) => {
dynamicVariables.forEach((variable) => {
if (
variable.dynamicVariablesAttribute &&
clickhouseQuery.query?.includes(`$${variable.name}`) &&
!dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
) {
dynamicVariableToWidgetsMap[variable.id].push(widget.id);
}
});
});
}
});
}

View File

@@ -0,0 +1,132 @@
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';
import { convertValue, getFormattedUnit } from 'lib/getConvertedValue';
describe('getFormattedUnit', () => {
it('should return the grafana unit for universal unit if it exists', () => {
const formattedUnit = getFormattedUnit(UniversalYAxisUnit.KILOBYTES);
expect(formattedUnit).toBe('deckbytes');
});
it('should return the unit directly if it is not a universal unit', () => {
const formattedUnit = getFormattedUnit('{reason}');
expect(formattedUnit).toBe('{reason}');
});
it('should return the universal unit directly if it does not have a grafana equivalent', () => {
const formattedUnit = getFormattedUnit(UniversalYAxisUnit.EXABYTES);
expect(formattedUnit).toBe(UniversalYAxisUnit.EXABYTES);
});
});
describe('convertValue', () => {
describe('data', () => {
it('should convert bytes (IEC) to kilobytes', () => {
expect(
convertValue(
1000,
UniversalYAxisUnit.BYTES_IEC,
UniversalYAxisUnit.KILOBYTES,
),
).toBe(1);
});
it('should convert bytes (SI) to kilobytes', () => {
expect(
convertValue(1000, UniversalYAxisUnit.BYTES, UniversalYAxisUnit.KILOBYTES),
).toBe(1);
});
it('should convert kilobytes to bytes', () => {
expect(
convertValue(1, UniversalYAxisUnit.KILOBYTES, UniversalYAxisUnit.BYTES),
).toBe(1000);
});
it('should convert megabytes to kilobytes', () => {
expect(convertValue(1, 'mbytes', 'kbytes')).toBe(1024);
});
it('should convert gigabytes to megabytes', () => {
expect(convertValue(1, 'gbytes', 'mbytes')).toBe(1024);
});
it('should convert kilobytes to megabytes', () => {
expect(convertValue(1024, 'kbytes', 'mbytes')).toBe(1);
});
it('should convert bits to gigabytes', () => {
// 12 GB = 103079215104 bits
expect(convertValue(103079215104, 'bits', 'gbytes')).toBe(12);
});
});
describe('time', () => {
it('should convert milliseconds to seconds', () => {
expect(convertValue(1000, 'ms', 's')).toBe(1);
});
it('should convert seconds to milliseconds', () => {
expect(convertValue(1, 's', 'ms')).toBe(1000);
});
it('should convert nanoseconds to milliseconds', () => {
expect(convertValue(1000000, 'ns', 'ms')).toBe(1);
});
it('should convert seconds to minutes', () => {
expect(convertValue(60, 's', 'm')).toBe(1);
});
it('should convert minutes to hours', () => {
expect(convertValue(60, 'm', 'h')).toBe(1);
});
});
describe('data rate', () => {
it('should convert bytes/sec to kibibytes/sec', () => {
expect(convertValue(1024, 'binBps', 'KiBs')).toBe(1);
});
it('should convert kibibytes/sec to bytes/sec', () => {
expect(convertValue(1, 'KiBs', 'binBps')).toBe(1024);
});
});
describe('throughput', () => {
it('should convert counts per second to counts per minute', () => {
expect(convertValue(1, 'cps', 'cpm')).toBe(1 / 60);
});
it('should convert operations per second to operations per minute', () => {
expect(convertValue(1, 'ops', 'opm')).toBe(1 / 60);
});
it('should convert counts per minute to counts per second', () => {
expect(convertValue(1, 'cpm', 'cps')).toBe(60);
});
it('should convert operations per minute to operations per second', () => {
expect(convertValue(1, 'opm', 'ops')).toBe(60);
});
});
describe('percent', () => {
it('should convert percentunit to percent', () => {
expect(convertValue(0.5, 'percentunit', 'percent')).toBe(50);
});
it('should convert percent to percentunit', () => {
expect(convertValue(50, 'percent', 'percentunit')).toBe(0.5);
});
});
describe('invalid values', () => {
it('should return null when currentUnit is invalid', () => {
expect(convertValue(100, 'invalidUnit', 'bytes')).toBeNull();
});
it('should return null when targetUnit is invalid', () => {
expect(convertValue(100, 'bytes', 'invalidUnit')).toBeNull();
});
});
});

View File

@@ -1,3 +1,17 @@
import {
UniversalUnitToGrafanaUnit,
Y_AXIS_UNIT_NAMES,
} from 'components/YAxisUnitSelector/constants';
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';
import { isUniversalUnit } from 'components/YAxisUnitSelector/utils';
// 1 byte = 8 bits
// Or 1 bit = 1/8 bytes
const BIT_FACTOR = 1 / 8;
const DECIMAL_FACTOR = 1000;
const BINARY_FACTOR = 1024;
const unitsMapping = [
{
label: 'Data',
@@ -15,62 +29,132 @@ const unitsMapping = [
{
label: 'bits(IEC)',
value: 'bits',
factor: 8, // 1 byte = 8 bits
factor: BIT_FACTOR,
},
{
label: 'bits(SI)',
value: 'decbits',
factor: 8, // 1 byte = 8 bits
factor: BIT_FACTOR,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.KILOBITS],
value: UniversalYAxisUnit.KILOBITS,
factor: BIT_FACTOR * DECIMAL_FACTOR,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.MEGABITS],
value: UniversalYAxisUnit.MEGABITS,
factor: BIT_FACTOR * DECIMAL_FACTOR ** 2,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.GIGABITS],
value: UniversalYAxisUnit.GIGABITS,
factor: BIT_FACTOR * DECIMAL_FACTOR ** 3,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.TERABITS],
value: UniversalYAxisUnit.TERABITS,
factor: BIT_FACTOR * DECIMAL_FACTOR ** 4,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.PETABITS],
value: UniversalYAxisUnit.PETABITS,
factor: BIT_FACTOR * DECIMAL_FACTOR ** 5,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.EXABITS],
value: UniversalYAxisUnit.EXABITS,
factor: BIT_FACTOR * DECIMAL_FACTOR ** 6,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ZETTABITS],
value: UniversalYAxisUnit.ZETTABITS,
factor: BIT_FACTOR * DECIMAL_FACTOR ** 7,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.YOTTABITS],
value: UniversalYAxisUnit.YOTTABITS,
factor: BIT_FACTOR * DECIMAL_FACTOR ** 8,
},
{
label: 'kibibytes',
value: 'kbytes',
factor: 1024,
factor: BINARY_FACTOR,
},
{
label: 'kilobytes',
value: 'deckbytes',
factor: 1000,
factor: DECIMAL_FACTOR,
},
{
label: 'mebibytes',
value: 'mbytes',
factor: 1024 * 1024,
factor: BINARY_FACTOR ** 2,
},
{
label: 'megabytes',
value: 'decmbytes',
factor: 1000 * 1000,
factor: DECIMAL_FACTOR ** 2,
},
{
label: 'gibibytes',
value: 'gbytes',
factor: 1024 * 1024 * 1024,
factor: BINARY_FACTOR ** 3,
},
{
label: 'gigabytes',
value: 'decgbytes',
factor: 1000 * 1000 * 1000,
factor: DECIMAL_FACTOR ** 3,
},
{
label: 'tebibytes',
value: 'tbytes',
factor: 1024 * 1024 * 1024 * 1024,
factor: BINARY_FACTOR ** 4,
},
{
label: 'terabytes',
value: 'dectbytes',
factor: 1000 * 1000 * 1000 * 1000,
factor: DECIMAL_FACTOR ** 4,
},
{
label: 'pebibytes',
value: 'pbytes',
factor: 1024 * 1024 * 1024 * 1024 * 1024,
factor: BINARY_FACTOR ** 5,
},
{
label: 'petabytes',
value: 'decpbytes',
factor: 1000 * 1000 * 1000 * 1000 * 1000,
factor: DECIMAL_FACTOR ** 5,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.EXABYTES],
value: UniversalYAxisUnit.EXABYTES,
factor: DECIMAL_FACTOR ** 6,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.EXBIBYTES],
value: UniversalYAxisUnit.EXBIBYTES,
factor: BINARY_FACTOR ** 6,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ZETTABYTES],
value: UniversalYAxisUnit.ZETTABYTES,
factor: DECIMAL_FACTOR ** 7,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ZEBIBYTES],
value: UniversalYAxisUnit.ZEBIBYTES,
factor: BINARY_FACTOR ** 7,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.YOTTABYTES],
value: UniversalYAxisUnit.YOTTABYTES,
factor: DECIMAL_FACTOR ** 8,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.YOBIBYTES],
value: UniversalYAxisUnit.YOBIBYTES,
factor: BINARY_FACTOR ** 8,
},
],
},
@@ -90,44 +174,103 @@ const unitsMapping = [
{
label: 'bits/sec(IEC)',
value: 'binbps',
factor: 8, // 1 byte = 8 bits
factor: BIT_FACTOR, // 1 byte = 8 bits
},
{
label: 'bits/sec(SI)',
value: 'bps',
factor: 8, // 1 byte = 8 bits
factor: BIT_FACTOR, // 1 byte = 8 bits
},
{
label: 'kibibytes/sec',
value: 'KiBs',
factor: 1024,
factor: BINARY_FACTOR,
},
{
label: 'kibibits/sec',
value: 'Kibits',
factor: 8 * 1024, // 1 KiB = 8 Kibits
factor: BIT_FACTOR * BINARY_FACTOR, // 1 KiB = 8 Kibits
},
{
label: 'kilobytes/sec',
value: 'KBs',
factor: 1000,
factor: DECIMAL_FACTOR,
},
{
label: 'kilobits/sec',
value: 'Kbits',
factor: 8 * 1000, // 1 KB = 8 Kbits
factor: BIT_FACTOR * DECIMAL_FACTOR, // 1 KB = 8 Kbits
},
{
label: 'mebibytes/sec',
value: 'MiBs',
factor: 1024 * 1024,
factor: BINARY_FACTOR ** 2,
},
{
label: 'mebibits/sec',
value: 'Mibits',
factor: 8 * 1024 * 1024, // 1 MiB = 8 Mibits
factor: BIT_FACTOR * BINARY_FACTOR ** 2, // 1 MiB = 8 Mibits
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.EXABYTES_SECOND],
value: UniversalYAxisUnit.EXABYTES_SECOND,
factor: DECIMAL_FACTOR ** 6,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ZETTABYTES_SECOND],
value: UniversalYAxisUnit.ZETTABYTES_SECOND,
factor: DECIMAL_FACTOR ** 7,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.YOTTABYTES_SECOND],
value: UniversalYAxisUnit.YOTTABYTES_SECOND,
factor: DECIMAL_FACTOR ** 8,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.EXBIBYTES_SECOND],
value: UniversalYAxisUnit.EXBIBYTES_SECOND,
factor: BINARY_FACTOR ** 6,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ZEBIBYTES_SECOND],
value: UniversalYAxisUnit.ZEBIBYTES_SECOND,
factor: BINARY_FACTOR ** 7,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.YOBIBYTES_SECOND],
value: UniversalYAxisUnit.YOBIBYTES_SECOND,
factor: BINARY_FACTOR ** 8,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.EXABITS_SECOND],
value: UniversalYAxisUnit.EXABITS_SECOND,
factor: BIT_FACTOR * DECIMAL_FACTOR ** 6,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ZETTABITS_SECOND],
value: UniversalYAxisUnit.ZETTABITS_SECOND,
factor: BIT_FACTOR * DECIMAL_FACTOR ** 7,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.YOTTABITS_SECOND],
value: UniversalYAxisUnit.YOTTABITS_SECOND,
factor: BIT_FACTOR * DECIMAL_FACTOR ** 8,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.EXBIBITS_SECOND],
value: UniversalYAxisUnit.EXBIBITS_SECOND,
factor: BIT_FACTOR * BINARY_FACTOR ** 6,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ZEBIBITS_SECOND],
value: UniversalYAxisUnit.ZEBIBITS_SECOND,
factor: BIT_FACTOR * BINARY_FACTOR ** 7,
},
{
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.YOBIBITS_SECOND],
value: UniversalYAxisUnit.YOBIBITS_SECOND,
factor: BIT_FACTOR * BINARY_FACTOR ** 8,
},
// ... (other options)
],
},
{
@@ -268,6 +411,14 @@ function findUnitObject(
return unitObj || null;
}
export function getFormattedUnit(unit: string): string {
const isUniversalYAxisUnit = isUniversalUnit(unit);
if (isUniversalYAxisUnit) {
return UniversalUnitToGrafanaUnit[unit as UniversalYAxisUnit] || unit;
}
return unit;
}
export function convertValue(
value: number,
currentUnit?: string,
@@ -281,8 +432,12 @@ export function convertValue(
) {
return value;
}
const currentUnitObj = findUnitObject(currentUnit);
const targetUnitObj = findUnitObject(targetUnit);
const formattedCurrentUnit = getFormattedUnit(currentUnit);
const formattedTargetUnit = getFormattedUnit(targetUnit);
const currentUnitObj = findUnitObject(formattedCurrentUnit);
const targetUnitObj = findUnitObject(formattedTargetUnit);
if (currentUnitObj && targetUnitObj) {
const baseValue = value * currentUnitObj.factor;

View File

@@ -7634,11 +7634,6 @@ compute-scroll-into-view@^3.0.2:
resolved "https://registry.npmjs.org/compute-scroll-into-view/-/compute-scroll-into-view-3.0.3.tgz"
integrity sha512-nadqwNxghAGTamwIqQSG433W6OADZx2vCo3UXHNrzTRHK/htu+7+L0zhjEoaeaQVNAi3YgqWDv8+tzf0hRfR+A==
confusing-browser-globals@^1.0.10:
version "1.0.11"
resolved "https://registry.npmjs.org/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz"
integrity sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA==
connect-history-api-fallback@^2.0.0:
version "2.0.0"
resolved "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz"
@@ -9018,42 +9013,11 @@ escodegen@^2.0.0:
optionalDependencies:
source-map "~0.6.1"
eslint-config-airbnb-base@^15.0.0:
version "15.0.0"
resolved "https://registry.npmjs.org/eslint-config-airbnb-base/-/eslint-config-airbnb-base-15.0.0.tgz"
integrity sha512-xaX3z4ZZIcFLvh2oUNvcX5oEofXda7giYmuplVxoOg5A7EXJMrUyqRgR+mhDhPK8LZ4PttFOBvCYDbX3sUoUig==
dependencies:
confusing-browser-globals "^1.0.10"
object.assign "^4.1.2"
object.entries "^1.1.5"
semver "^6.3.0"
eslint-config-airbnb-typescript@^16.1.4:
version "16.2.0"
resolved "https://registry.npmjs.org/eslint-config-airbnb-typescript/-/eslint-config-airbnb-typescript-16.2.0.tgz"
integrity sha512-OUaMPZpTOZGKd5tXOjJ9PRU4iYNW/Z5DoHIynjsVK/FpkWdiY5+nxQW6TiJAlLwVI1l53xUOrnlZWtVBVQzuWA==
dependencies:
eslint-config-airbnb-base "^15.0.0"
eslint-config-airbnb@^19.0.4:
version "19.0.4"
resolved "https://registry.npmjs.org/eslint-config-airbnb/-/eslint-config-airbnb-19.0.4.tgz"
integrity sha512-T75QYQVQX57jiNgpF9r1KegMICE94VYwoFQyMGhrvc+lB8YF2E/M/PYDaQe1AJcWaEgqLE+ErXV1Og/+6Vyzew==
dependencies:
eslint-config-airbnb-base "^15.0.0"
object.assign "^4.1.2"
object.entries "^1.1.5"
eslint-config-prettier@^8.3.0:
version "8.8.0"
resolved "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.8.0.tgz"
integrity sha512-wLbQiFre3tdGgpDv67NQKnJuTlcUVYHas3k+DZCc2U2BadthoEY4B7hLPvAxaqdyOGCzuLfii2fqGph10va7oA==
eslint-config-standard@^16.0.3:
version "16.0.3"
resolved "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-16.0.3.tgz"
integrity sha512-x4fmJL5hGqNJKGHSjnLdgA6U6h1YW/G2dW9fA+cyVur4SK6lyue8+UgNKWlZtUDTXvgKDD/Oa3GQjmB5kjtVvg==
eslint-import-resolver-node@^0.3.7:
version "0.3.7"
resolved "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.7.tgz"
@@ -9070,14 +9034,6 @@ eslint-module-utils@^2.8.0:
dependencies:
debug "^3.2.7"
eslint-plugin-es@^3.0.0:
version "3.0.1"
resolved "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-3.0.1.tgz"
integrity sha512-GUmAsJaN4Fc7Gbtl8uOBlayo2DqhwWvEzykMHSCZHU3XdJ+NSzzZcVhXh3VxX5icqQ+oQdIEawXX8xkR3mIFmQ==
dependencies:
eslint-utils "^2.0.0"
regexpp "^3.0.0"
eslint-plugin-import@^2.28.1:
version "2.28.1"
resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.28.1.tgz#63b8b5b3c409bfc75ebaf8fb206b07ab435482c4"
@@ -9130,18 +9086,6 @@ eslint-plugin-jsx-a11y@^6.5.1:
object.fromentries "^2.0.6"
semver "^6.3.0"
eslint-plugin-node@^11.1.0:
version "11.1.0"
resolved "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-11.1.0.tgz"
integrity sha512-oUwtPJ1W0SKD0Tr+wqu92c5xuCeQqB3hSCHasn/ZgjFdA9iDGNkNf2Zi9ztY7X+hNuMib23LNGRm6+uN+KLE3g==
dependencies:
eslint-plugin-es "^3.0.0"
eslint-utils "^2.0.0"
ignore "^5.1.1"
minimatch "^3.0.4"
resolve "^1.10.1"
semver "^6.1.0"
eslint-plugin-prettier@^4.0.0:
version "4.2.1"
resolved "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-4.2.1.tgz"
@@ -9149,11 +9093,6 @@ eslint-plugin-prettier@^4.0.0:
dependencies:
prettier-linter-helpers "^1.0.0"
eslint-plugin-promise@^5.1.0:
version "5.2.0"
resolved "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-5.2.0.tgz"
integrity sha512-SftLb1pUG01QYq2A/hGAWfDRXqYD82zE7j7TopDOyNdU+7SvvoXREls/+PRTY17vUXzXnZA/zfnyKgRH6x4JJw==
eslint-plugin-react-hooks@^4.3.0:
version "4.6.0"
resolved "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz"
@@ -9198,7 +9137,7 @@ eslint-scope@5.1.1, eslint-scope@^5.1.1:
esrecurse "^4.3.0"
estraverse "^4.1.1"
eslint-utils@^2.0.0, eslint-utils@^2.1.0:
eslint-utils@^2.1.0:
version "2.1.0"
resolved "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz"
integrity sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==
@@ -10733,7 +10672,7 @@ ignore@^4.0.6:
resolved "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz"
integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==
ignore@^5.1.1, ignore@^5.1.8, ignore@^5.2.0:
ignore@^5.1.8, ignore@^5.2.0:
version "5.2.4"
resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324"
integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==
@@ -13693,7 +13632,7 @@ object-keys@^1.1.1:
resolved "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz"
integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==
object.assign@^4.1.2, object.assign@^4.1.3, object.assign@^4.1.4:
object.assign@^4.1.3, object.assign@^4.1.4:
version "4.1.4"
resolved "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz"
integrity sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==
@@ -13703,7 +13642,7 @@ object.assign@^4.1.2, object.assign@^4.1.3, object.assign@^4.1.4:
has-symbols "^1.0.3"
object-keys "^1.1.1"
object.entries@^1.1.5, object.entries@^1.1.6:
object.entries@^1.1.6:
version "1.1.6"
resolved "https://registry.npmjs.org/object.entries/-/object.entries-1.1.6.tgz"
integrity sha512-leTPzo4Zvg3pmbQ3rDK69Rl8GQvIqMWubrkxONG9/ojtFE2rD9fjMKfSI5BxW3osRH1m6VdzmqK8oAY9aT4x5w==
@@ -15814,7 +15753,7 @@ regexp.prototype.flags@^1.4.3, regexp.prototype.flags@^1.5.0:
define-properties "^1.2.0"
functions-have-names "^1.2.3"
regexpp@^3.0.0, regexpp@^3.1.0:
regexpp@^3.1.0:
version "3.2.0"
resolved "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz"
integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==
@@ -16095,7 +16034,7 @@ resolve.exports@^1.1.0:
resolved "https://registry.npmjs.org/resolve.exports/-/resolve.exports-1.1.1.tgz"
integrity sha512-/NtpHNDN7jWhAaQ9BvBUYZ6YTXsRBgfqWFWP7BZBaoMJO/I3G5OFzvTuWNlZC3aPjins1F+TNrLKsGbH4rfsRQ==
resolve@^1.10.0, resolve@^1.10.1, resolve@^1.12.0, resolve@^1.14.2, resolve@^1.19.0, resolve@^1.20.0, resolve@^1.22.1:
resolve@^1.10.0, resolve@^1.12.0, resolve@^1.14.2, resolve@^1.19.0, resolve@^1.20.0, resolve@^1.22.1:
version "1.22.2"
resolved "https://registry.npmjs.org/resolve/-/resolve-1.22.2.tgz"
integrity sha512-Sb+mjNHOULsBv818T40qSPeRiuWLyaGMa5ewydRLFimneixmVy2zdivRl+AF6jaYPC8ERxGDmFSiqui6SfPd+g==
@@ -16343,7 +16282,7 @@ selfsigned@^2.4.1:
"@types/node-forge" "^1.3.0"
node-forge "^1"
"semver@2 || 3 || 4 || 5", semver@7.3.7, semver@7.5.4, semver@7.x, semver@^5.6.0, semver@^6.0.0, semver@^6.1.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0, semver@^6.3.1, semver@^7.2.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.6.3:
"semver@2 || 3 || 4 || 5", semver@7.3.7, semver@7.5.4, semver@7.x, semver@^5.6.0, semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0, semver@^6.3.1, semver@^7.2.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.6.3:
version "7.5.4"
resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e"
integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==

View File

@@ -0,0 +1,150 @@
package signozapiserver
import (
"net/http"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/gatewaytypes"
"github.com/gorilla/mux"
)
func (provider *provider) addGatewayRoutes(router *mux.Router) error {
if err := router.Handle("/api/v2/gateway/ingestion_keys", handler.New(provider.authZ.AdminAccess(provider.gatewayHandler.GetIngestionKeys), handler.OpenAPIDef{
ID: "GetIngestionKeys",
Tags: []string{"gateway"},
Summary: "Get ingestion keys for workspace",
Description: "This endpoint returns the ingestion keys for a workspace",
Request: nil,
RequestContentType: "",
Response: new(gatewaytypes.GettableIngestionKeys),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v2/gateway/ingestion_keys/search", handler.New(provider.authZ.AdminAccess(provider.gatewayHandler.SearchIngestionKeys), handler.OpenAPIDef{
ID: "SearchIngestionKeys",
Tags: []string{"gateway"},
Summary: "Search ingestion keys for workspace",
Description:         "This endpoint searches the ingestion keys for a workspace by name",
Request: nil,
RequestContentType: "",
Response: new(gatewaytypes.GettableIngestionKeys),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v2/gateway/ingestion_keys", handler.New(provider.authZ.AdminAccess(provider.gatewayHandler.CreateIngestionKey), handler.OpenAPIDef{
ID: "CreateIngestionKey",
Tags: []string{"gateway"},
Summary: "Create ingestion key for workspace",
Description: "This endpoint creates an ingestion key for the workspace",
Request: new(gatewaytypes.PostableIngestionKey),
RequestContentType: "application/json",
Response: new(gatewaytypes.GettableCreatedIngestionKey),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
})).Methods(http.MethodPost).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v2/gateway/ingestion_keys/{keyId}", handler.New(provider.authZ.AdminAccess(provider.gatewayHandler.UpdateIngestionKey), handler.OpenAPIDef{
ID: "UpdateIngestionKey",
Tags: []string{"gateway"},
Summary: "Update ingestion key for workspace",
Description: "This endpoint updates an ingestion key for the workspace",
Request: new(gatewaytypes.PostableIngestionKey),
RequestContentType: "application/json",
Response: nil,
ResponseContentType: "",
SuccessStatusCode: http.StatusNoContent,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
})).Methods(http.MethodPatch).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v2/gateway/ingestion_keys/{keyId}", handler.New(provider.authZ.AdminAccess(provider.gatewayHandler.DeleteIngestionKey), handler.OpenAPIDef{
ID: "DeleteIngestionKey",
Tags: []string{"gateway"},
Summary: "Delete ingestion key for workspace",
Description: "This endpoint deletes an ingestion key for the workspace",
Request: nil,
RequestContentType: "",
Response: nil,
ResponseContentType: "",
SuccessStatusCode: http.StatusNoContent,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
})).Methods(http.MethodDelete).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v2/gateway/ingestion_keys/{keyId}/limits", handler.New(provider.authZ.AdminAccess(provider.gatewayHandler.CreateIngestionKeyLimit), handler.OpenAPIDef{
ID: "CreateIngestionKeyLimit",
Tags: []string{"gateway"},
Summary: "Create limit for the ingestion key",
Description: "This endpoint creates an ingestion key limit",
Request: new(gatewaytypes.PostableIngestionKeyLimit),
RequestContentType: "application/json",
Response: new(gatewaytypes.GettableCreatedIngestionKeyLimit),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusCreated,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
})).Methods(http.MethodPost).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v2/gateway/ingestion_keys/limits/{limitId}", handler.New(provider.authZ.AdminAccess(provider.gatewayHandler.UpdateIngestionKeyLimit), handler.OpenAPIDef{
ID: "UpdateIngestionKeyLimit",
Tags: []string{"gateway"},
Summary: "Update limit for the ingestion key",
Description: "This endpoint updates an ingestion key limit",
Request: new(gatewaytypes.UpdatableIngestionKeyLimit),
RequestContentType: "application/json",
Response: nil,
ResponseContentType: "",
SuccessStatusCode: http.StatusNoContent,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
})).Methods(http.MethodPatch).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v2/gateway/ingestion_keys/limits/{limitId}", handler.New(provider.authZ.AdminAccess(provider.gatewayHandler.DeleteIngestionKeyLimit), handler.OpenAPIDef{
ID: "DeleteIngestionKeyLimit",
Tags: []string{"gateway"},
Summary: "Delete limit for the ingestion key",
Description: "This endpoint deletes an ingestion key limit",
Request: nil,
RequestContentType:  "",
Response: nil,
ResponseContentType: "",
SuccessStatusCode: http.StatusNoContent,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
})).Methods(http.MethodDelete).GetError(); err != nil {
return err
}
return nil
}
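
For illustration, here is a minimal Go client call against the first route registered above. It is only a sketch: the base URL and the bearer token are placeholders and are not part of this change; in practice the request must carry an admin JWT because the route is wrapped in AdminAccess.

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Placeholder host and token: substitute a real SigNoz endpoint and an admin JWT.
	req, err := http.NewRequest(http.MethodGet,
		"http://localhost:8080/api/v2/gateway/ingestion_keys?page=1&per_page=10", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer <admin-jwt>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// On success the handler renders a GettableIngestionKeys payload with HTTP 200.
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.StatusCode, string(body))
}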

View File

@@ -7,6 +7,7 @@ import (
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/gateway"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/http/middleware"
@@ -39,6 +40,7 @@ type provider struct {
dashboardModule dashboard.Module
dashboardHandler dashboard.Handler
metricsExplorerHandler metricsexplorer.Handler
gatewayHandler gateway.Handler
}
func NewFactory(
@@ -55,9 +57,10 @@ func NewFactory(
dashboardModule dashboard.Module,
dashboardHandler dashboard.Handler,
metricsExplorerHandler metricsexplorer.Handler,
gatewayHandler gateway.Handler,
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler, globalHandler, promoteHandler, flaggerHandler, dashboardModule, dashboardHandler, metricsExplorerHandler)
return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler, globalHandler, promoteHandler, flaggerHandler, dashboardModule, dashboardHandler, metricsExplorerHandler, gatewayHandler)
})
}
@@ -78,6 +81,7 @@ func newProvider(
dashboardModule dashboard.Module,
dashboardHandler dashboard.Handler,
metricsExplorerHandler metricsexplorer.Handler,
gatewayHandler gateway.Handler,
) (apiserver.APIServer, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
router := mux.NewRouter().UseEncodedPath()
@@ -97,6 +101,7 @@ func newProvider(
dashboardModule: dashboardModule,
dashboardHandler: dashboardHandler,
metricsExplorerHandler: metricsExplorerHandler,
gatewayHandler: gatewayHandler,
}
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
@@ -153,6 +158,10 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
return err
}
if err := provider.addGatewayRoutes(router); err != nil {
return err
}
return nil
}

View File

@@ -4,14 +4,9 @@ import (
"net/url"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
)
var (
ErrCodeInvalidGatewayConfig = errors.MustNewCode("invalid_gateway_config")
)
type Config struct {
URL *url.URL `mapstructure:"url"`
}

60 pkg/gateway/gateway.go Normal file
View File

@@ -0,0 +1,60 @@
package gateway
import (
"context"
"net/http"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/gatewaytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
var (
ErrCodeGatewayUnsupported = errors.MustNewCode("gateway_unsupported")
ErrCodeInvalidGatewayConfig = errors.MustNewCode("invalid_gateway_config")
)
type Gateway interface {
// Get Ingestion Keys (intended to be scoped to the current user; for now the gateway code ignores the consuming user)
GetIngestionKeys(ctx context.Context, orgID valuer.UUID, page, perPage int) (*gatewaytypes.GettableIngestionKeys, error)
// Search Ingestion Keys by Name (intended to be scoped to the current user; for now the gateway code ignores the consuming user)
SearchIngestionKeysByName(ctx context.Context, orgID valuer.UUID, name string, page, perPage int) (*gatewaytypes.GettableIngestionKeys, error)
// Create Ingestion Key
CreateIngestionKey(ctx context.Context, orgID valuer.UUID, name string, tags []string, expiresAt time.Time) (*gatewaytypes.GettableCreatedIngestionKey, error)
// Update Ingestion Key
UpdateIngestionKey(ctx context.Context, orgID valuer.UUID, keyID string, name string, tags []string, expiresAt time.Time) error
// Delete Ingestion Key
DeleteIngestionKey(ctx context.Context, orgID valuer.UUID, keyID string) error
// Create Ingestion Key Limit
CreateIngestionKeyLimit(ctx context.Context, orgID valuer.UUID, keyID string, signal string, limitConfig gatewaytypes.LimitConfig, tags []string) (*gatewaytypes.GettableCreatedIngestionKeyLimit, error)
// Update Ingestion Key Limit
UpdateIngestionKeyLimit(ctx context.Context, orgID valuer.UUID, limitID string, limitConfig gatewaytypes.LimitConfig, tags []string) error
// Delete Ingestion Key Limit
DeleteIngestionKeyLimit(ctx context.Context, orgID valuer.UUID, limitID string) error
}
type Handler interface {
GetIngestionKeys(http.ResponseWriter, *http.Request)
SearchIngestionKeys(http.ResponseWriter, *http.Request)
CreateIngestionKey(http.ResponseWriter, *http.Request)
UpdateIngestionKey(http.ResponseWriter, *http.Request)
DeleteIngestionKey(http.ResponseWriter, *http.Request)
CreateIngestionKeyLimit(http.ResponseWriter, *http.Request)
UpdateIngestionKeyLimit(http.ResponseWriter, *http.Request)
DeleteIngestionKeyLimit(http.ResponseWriter, *http.Request)
}

287 pkg/gateway/handler.go Normal file
View File

@@ -0,0 +1,287 @@
package gateway
import (
"encoding/json"
"net/http"
"strconv"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/gatewaytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/gorilla/mux"
)
const (
DefaultPage = 1
DefaultPageSize = 10
)
type handler struct {
gateway Gateway
}
func NewHandler(gateway Gateway) Handler {
return &handler{
gateway: gateway,
}
}
func (handler *handler) GetIngestionKeys(rw http.ResponseWriter, r *http.Request) {
ctx := r.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
pageString := r.URL.Query().Get("page")
perPageString := r.URL.Query().Get("per_page")
page, err := parseIntWithDefaultValue(pageString, DefaultPage)
if err != nil {
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "page must be a valid integer"))
return
}
perPage, err := parseIntWithDefaultValue(perPageString, DefaultPageSize)
if err != nil {
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "per_page must be a valid integer"))
return
}
response, err := handler.gateway.GetIngestionKeys(ctx, orgID, page, perPage)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, response)
}
func (handler *handler) SearchIngestionKeys(rw http.ResponseWriter, r *http.Request) {
ctx := r.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
pageString := r.URL.Query().Get("page")
perPageString := r.URL.Query().Get("per_page")
name := r.URL.Query().Get("name")
page, err := parseIntWithDefaultValue(pageString, DefaultPage)
if err != nil {
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "page must be a valid integer"))
return
}
perPage, err := parseIntWithDefaultValue(perPageString, DefaultPageSize)
if err != nil {
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "per_page must be a valid integer"))
return
}
response, err := handler.gateway.SearchIngestionKeysByName(ctx, orgID, name, page, perPage)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, response)
}
func (handler *handler) CreateIngestionKey(rw http.ResponseWriter, r *http.Request) {
ctx := r.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
var req gatewaytypes.PostableIngestionKey
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid request body"))
return
}
response, err := handler.gateway.CreateIngestionKey(ctx, orgID, req.Name, req.Tags, req.ExpiresAt)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, response)
}
func (handler *handler) UpdateIngestionKey(rw http.ResponseWriter, r *http.Request) {
ctx := r.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
keyID := mux.Vars(r)["keyId"]
if keyID == "" {
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "keyId is required"))
return
}
var req gatewaytypes.PostableIngestionKey
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid request body"))
return
}
err = handler.gateway.UpdateIngestionKey(ctx, orgID, keyID, req.Name, req.Tags, req.ExpiresAt)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusNoContent, nil)
}
func (handler *handler) DeleteIngestionKey(rw http.ResponseWriter, r *http.Request) {
ctx := r.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
keyID := mux.Vars(r)["keyId"]
if keyID == "" {
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "keyId is required"))
return
}
err = handler.gateway.DeleteIngestionKey(ctx, orgID, keyID)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusNoContent, nil)
}
func (handler *handler) CreateIngestionKeyLimit(rw http.ResponseWriter, r *http.Request) {
ctx := r.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
keyID := mux.Vars(r)["keyId"]
if keyID == "" {
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "keyId is required"))
return
}
var req gatewaytypes.PostableIngestionKeyLimit
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid request body"))
return
}
response, err := handler.gateway.CreateIngestionKeyLimit(ctx, orgID, keyID, req.Signal, req.Config, req.Tags)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusCreated, response)
}
func (handler *handler) UpdateIngestionKeyLimit(rw http.ResponseWriter, r *http.Request) {
ctx := r.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
limitID := mux.Vars(r)["limitId"]
if limitID == "" {
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "limitId is required"))
return
}
var req gatewaytypes.UpdatableIngestionKeyLimit
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid request body"))
return
}
err = handler.gateway.UpdateIngestionKeyLimit(ctx, orgID, limitID, req.Config, req.Tags)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusNoContent, nil)
}
func (handler *handler) DeleteIngestionKeyLimit(rw http.ResponseWriter, r *http.Request) {
ctx := r.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
limitID := mux.Vars(r)["limitId"]
if limitID == "" {
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "limitId is required"))
return
}
err = handler.gateway.DeleteIngestionKeyLimit(ctx, orgID, limitID)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusNoContent, nil)
}
func parseIntWithDefaultValue(value string, defaultValue int) (int, error) {
if value == "" {
return defaultValue, nil
}
result, err := strconv.Atoi(value)
if err != nil {
return 0, err
}
return result, nil
}
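
To make the pagination behaviour above concrete, the following self-contained sketch shows how parseIntWithDefaultValue treats missing, valid, and invalid query values; the helper is reproduced locally so the snippet runs on its own, and the default values mirror DefaultPage/DefaultPageSize.

package main

import (
	"fmt"
	"strconv"
)

// Local copy of the handler's parseIntWithDefaultValue so the example is self-contained.
func parseIntWithDefaultValue(value string, defaultValue int) (int, error) {
	if value == "" {
		return defaultValue, nil
	}
	return strconv.Atoi(value)
}

func main() {
	page, _ := parseIntWithDefaultValue("", 1)       // missing -> DefaultPage (1)
	perPage, _ := parseIntWithDefaultValue("25", 10) // valid -> 25
	_, err := parseIntWithDefaultValue("abc", 1)     // invalid -> error; the handler responds with an invalid-input error

	fmt.Println(page, perPage, err != nil) // 1 25 true
}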

View File

@@ -0,0 +1,56 @@
package noopgateway
import (
"context"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/gateway"
"github.com/SigNoz/signoz/pkg/types/gatewaytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type provider struct{}
func NewProviderFactory() factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return factory.NewProviderFactory(factory.MustNewName("noop"), func(ctx context.Context, ps factory.ProviderSettings, c gateway.Config) (gateway.Gateway, error) {
return New(ctx, ps, c)
})
}
func New(_ context.Context, _ factory.ProviderSettings, _ gateway.Config) (gateway.Gateway, error) {
return &provider{}, nil
}
func (p *provider) GetIngestionKeys(_ context.Context, _ valuer.UUID, _, _ int) (*gatewaytypes.GettableIngestionKeys, error) {
return nil, errors.New(errors.TypeUnsupported, gateway.ErrCodeGatewayUnsupported, "unsupported call")
}
func (p *provider) SearchIngestionKeysByName(_ context.Context, _ valuer.UUID, _ string, _, _ int) (*gatewaytypes.GettableIngestionKeys, error) {
return nil, errors.New(errors.TypeUnsupported, gateway.ErrCodeGatewayUnsupported, "unsupported call")
}
func (p *provider) CreateIngestionKey(_ context.Context, _ valuer.UUID, _ string, _ []string, _ time.Time) (*gatewaytypes.GettableCreatedIngestionKey, error) {
return nil, errors.New(errors.TypeUnsupported, gateway.ErrCodeGatewayUnsupported, "unsupported call")
}
func (p *provider) UpdateIngestionKey(_ context.Context, _ valuer.UUID, _ string, _ string, _ []string, _ time.Time) error {
return errors.New(errors.TypeUnsupported, gateway.ErrCodeGatewayUnsupported, "unsupported call")
}
func (p *provider) DeleteIngestionKey(_ context.Context, _ valuer.UUID, _ string) error {
return errors.New(errors.TypeUnsupported, gateway.ErrCodeGatewayUnsupported, "unsupported call")
}
func (p *provider) CreateIngestionKeyLimit(_ context.Context, _ valuer.UUID, _ string, _ string, _ gatewaytypes.LimitConfig, _ []string) (*gatewaytypes.GettableCreatedIngestionKeyLimit, error) {
return nil, errors.New(errors.TypeUnsupported, gateway.ErrCodeGatewayUnsupported, "unsupported call")
}
func (p *provider) UpdateIngestionKeyLimit(_ context.Context, _ valuer.UUID, _ string, _ gatewaytypes.LimitConfig, _ []string) error {
return errors.New(errors.TypeUnsupported, gateway.ErrCodeGatewayUnsupported, "unsupported call")
}
func (p *provider) DeleteIngestionKeyLimit(_ context.Context, _ valuer.UUID, _ string) error {
return errors.New(errors.TypeUnsupported, gateway.ErrCodeGatewayUnsupported, "unsupported call")
}
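
As a quick sanity check, a compile-time assertion (hypothetical, not part of this change) confirms that the noop provider satisfies the gateway.Gateway interface declared earlier:

package noopgateway

import "github.com/SigNoz/signoz/pkg/gateway"

// Fails to compile if *provider stops implementing gateway.Gateway.
var _ gateway.Gateway = (*provider)(nil)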

View File

@@ -16,8 +16,8 @@ func NewHandler(global global.Global) global.Handler {
return &handler{global: global}
}
func (handker *handler) GetConfig(rw http.ResponseWriter, r *http.Request) {
cfg := handker.global.GetConfig()
func (handler *handler) GetConfig(rw http.ResponseWriter, r *http.Request) {
cfg := handler.global.GetConfig()
render.Success(rw, http.StatusOK, types.NewGettableGlobalConfig(cfg.ExternalURL, cfg.IngestionURL))
}

View File

@@ -4,6 +4,7 @@ import (
"context"
"net/http"
"github.com/SigNoz/signoz/pkg/statsreporter"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -32,6 +33,8 @@ type Module interface {
// Get the IDP info of the domain provided.
GetAuthNProviderInfo(context.Context, *authtypes.AuthDomain) *authtypes.AuthNProviderInfo
statsreporter.StatsCollector
}
type Handler interface {

View File

@@ -52,3 +52,25 @@ func (module *module) ListByOrgID(ctx context.Context, orgID valuer.UUID) ([]*au
func (module *module) Update(ctx context.Context, domain *authtypes.AuthDomain) error {
return module.store.Update(ctx, domain)
}
func (module *module) Collect(ctx context.Context, orgID valuer.UUID) (map[string]any, error) {
domains, err := module.store.ListByOrgID(ctx, orgID)
if err != nil {
return nil, err
}
stats := make(map[string]any)
for _, domain := range domains {
key := "authdomain." + domain.AuthDomainConfig().AuthNProvider.StringValue() + ".count"
if value, ok := stats[key]; ok {
stats[key] = value.(int64) + 1
} else {
stats[key] = int64(1)
}
}
stats["authdomain.count"] = len(domains)
return stats, nil
}
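
A standalone sketch of the counting logic in Collect, using plain provider-name strings in place of the authtypes domain objects, shows the shape of the reported stats map; the provider names here are only examples.

package main

import "fmt"

// collectStats mirrors the per-provider counting done in Collect above.
func collectStats(providers []string) map[string]any {
	stats := make(map[string]any)
	for _, p := range providers {
		key := "authdomain." + p + ".count"
		if v, ok := stats[key]; ok {
			stats[key] = v.(int64) + 1
		} else {
			stats[key] = int64(1)
		}
	}
	stats["authdomain.count"] = len(providers)
	return stats
}

func main() {
	fmt.Println(collectStats([]string{"saml", "saml", "google"}))
	// map[authdomain.count:3 authdomain.google.count:1 authdomain.saml.count:2]
}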

View File

@@ -3,6 +3,7 @@ package signoz
import (
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/gateway"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/global/signozglobal"
"github.com/SigNoz/signoz/pkg/licensing"
@@ -39,9 +40,10 @@ type Handlers struct {
MetricsExplorer metricsexplorer.Handler
Global global.Handler
FlaggerHandler flagger.Handler
GatewayHandler gateway.Handler
}
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing, global global.Global, flaggerService flagger.Flagger) Handlers {
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing, global global.Global, flaggerService flagger.Flagger, gatewayService gateway.Gateway) Handlers {
return Handlers{
SavedView: implsavedview.NewHandler(modules.SavedView),
Apdex: implapdex.NewHandler(modules.Apdex),
@@ -54,5 +56,6 @@ func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, que
SpanPercentile: implspanpercentile.NewHandler(modules.SpanPercentile),
Global: signozglobal.NewHandler(global),
FlaggerHandler: flagger.NewHandler(flaggerService),
GatewayHandler: gateway.NewHandler(gatewayService),
}
}

View File

@@ -42,7 +42,7 @@ func TestNewHandlers(t *testing.T) {
dashboardModule := impldashboard.NewModule(impldashboard.NewStore(sqlstore), providerSettings, nil, orgGetter, queryParser)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule)
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil)
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil)
reflectVal := reflect.ValueOf(handlers)
for i := 0; i < reflectVal.NumField(); i++ {

View File

@@ -9,6 +9,7 @@ import (
"github.com/SigNoz/signoz/pkg/apiserver/signozapiserver"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/gateway"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/instrumentation"
@@ -47,6 +48,7 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
struct{ dashboard.Module }{},
struct{ dashboard.Handler }{},
struct{ metricsexplorer.Handler }{},
struct{ gateway.Handler }{},
).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
if err != nil {
return nil, err

View File

@@ -241,6 +241,7 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
modules.Dashboard,
handlers.Dashboard,
handlers.MetricsExplorer,
handlers.GatewayHandler,
),
)
}

View File

@@ -15,6 +15,7 @@ import (
"github.com/SigNoz/signoz/pkg/emailing"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/gateway"
"github.com/SigNoz/signoz/pkg/instrumentation"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
@@ -71,6 +72,7 @@ type SigNoz struct {
Handlers Handlers
QueryParser queryparser.QueryParser
Flagger flagger.Flagger
Gateway gateway.Gateway
}
func New(
@@ -89,6 +91,7 @@ func New(
authNsCallback func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error),
authzCallback func(context.Context, sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config],
dashboardModuleCallback func(sqlstore.SQLStore, factory.ProviderSettings, analytics.Analytics, organization.Getter, role.Module, queryparser.QueryParser, querier.Querier, licensing.Licensing) dashboard.Module,
gatewayProviderFactory func(licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config],
) (*SigNoz, error) {
// Initialize instrumentation
instrumentation, err := instrumentation.New(ctx, config.Instrumentation, version.Info, "signoz")
@@ -336,6 +339,12 @@ func New(
return nil, err
}
gatewayFactory := gatewayProviderFactory(licensing)
gateway, err := gatewayFactory.New(ctx, providerSettings, config.Gateway)
if err != nil {
return nil, err
}
// Initialize authns
store := sqlauthnstore.NewStore(sqlstore)
authNs, err := authNsCallback(ctx, providerSettings, store, licensing)
@@ -382,7 +391,7 @@ func New(
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboardModule)
// Initialize all handlers for the modules
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger)
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway)
// Initialize the API server
apiserver, err := factory.NewProviderFromNamedMap(
@@ -406,6 +415,7 @@ func New(
licensing,
tokenizer,
config,
modules.AuthDomain,
}
// Initialize stats reporter from the available stats reporter provider factories
@@ -457,5 +467,6 @@ func New(
Handlers: handlers,
QueryParser: queryParser,
Flagger: flagger,
Gateway: gateway,
}, nil
}
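
For context, a build that does not ship the enterprise gateway could satisfy the new gatewayProviderFactory parameter with the noop provider shown earlier. This is a hedged sketch: the wrapper function name is assumed, not taken from this diff, and only the types come from the code above.

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/gateway"
	"github.com/SigNoz/signoz/pkg/gateway/noopgateway"
	"github.com/SigNoz/signoz/pkg/licensing"
)

// communityGatewayFactory ignores the licensing handle and always returns the
// noop gateway provider factory.
func communityGatewayFactory(_ licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
	return noopgateway.NewProviderFactory()
}

func main() {
	// In practice this function would be passed as the gatewayProviderFactory argument to signoz.New.
	fmt.Println(communityGatewayFactory(nil).Name())
}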

View File

@@ -0,0 +1,86 @@
package gatewaytypes
import (
"time"
)
type IngestionKey struct {
ID string `json:"id"`
Name string `json:"name"`
Value string `json:"value"`
ExpiresAt time.Time `json:"expires_at"`
Tags []string `json:"tags"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
WorkspaceID string `json:"workspace_id"`
Limits []Limit `json:"limits"`
}
type Limit struct {
ID string `json:"id"`
Signal string `json:"signal"` // "logs", "traces", "metrics"
Config LimitConfig `json:"config"`
Tags []string `json:"tags"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
KeyID string `json:"key_id"`
Metric LimitMetric `json:"metric"`
}
type LimitConfig struct {
Day *LimitValue `json:"day,omitempty"`
Second *LimitValue `json:"second,omitempty"`
}
type LimitValue struct {
Size int64 `json:"size,omitempty"`
Count int64 `json:"count,omitempty"`
}
type LimitMetric struct {
Day *LimitMetricValue `json:"day,omitempty"`
Second *LimitMetricValue `json:"second,omitempty"`
}
type LimitMetricValue struct {
Count int64 `json:"count"`
Size int64 `json:"size"`
}
type Pagination struct {
Page int `json:"page"`
PerPage int `json:"per_page"`
Pages int `json:"pages"`
Total int `json:"total"`
}
type GettableIngestionKeys struct {
Keys []IngestionKey `json:"keys"`
Pagination Pagination `json:"_pagination"`
}
type PostableIngestionKey struct {
Name string `json:"name"`
Tags []string `json:"tags"`
ExpiresAt time.Time `json:"expires_at"`
}
type GettableCreatedIngestionKey struct {
ID string `json:"id"`
Value string `json:"value"`
}
type PostableIngestionKeyLimit struct {
Signal string `json:"signal"`
Config LimitConfig `json:"config"`
Tags []string `json:"tags"`
}
type GettableCreatedIngestionKeyLimit struct {
ID string `json:"id"`
}
type UpdatableIngestionKeyLimit struct {
Config LimitConfig `json:"config"`
Tags []string `json:"tags"`
}
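
As an example of the wire format these request types imply, the sketch below marshals a PostableIngestionKey into the JSON body accepted by POST /api/v2/gateway/ingestion_keys. The struct is copied locally so the snippet runs on its own, and the field values are illustrative only.

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// Local copy of PostableIngestionKey so the example is self-contained.
type PostableIngestionKey struct {
	Name      string    `json:"name"`
	Tags      []string  `json:"tags"`
	ExpiresAt time.Time `json:"expires_at"`
}

func main() {
	body := PostableIngestionKey{
		Name:      "staging-key", // example values, not from the diff
		Tags:      []string{"env:staging"},
		ExpiresAt: time.Date(2026, 12, 31, 0, 0, 0, 0, time.UTC),
	}

	b, err := json.MarshalIndent(body, "", "  ")
	if err != nil {
		panic(err)
	}
	// Prints a JSON object with the name, tags and expires_at fields.
	fmt.Println(string(b))
}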

View File

@@ -66,6 +66,7 @@ def signoz( # pylint: disable=too-many-arguments,too-many-positional-arguments
"SIGNOZ_PROMETHEUS_ACTIVE__QUERY__TRACKER_ENABLED": False,
"SIGNOZ_GATEWAY_URL": gateway.container_configs["8080"].base(),
"SIGNOZ_TOKENIZER_JWT_SECRET": "secret",
"SIGNOZ_GLOBAL_INGESTION__URL": "https://ingest.test.signoz.cloud"
}
| sqlstore.env
| clickhouse.env