Mirror of https://github.com/SigNoz/signoz.git — synced 2026-03-18 10:42:14 +00:00

Compare commits: tvats-move ... fix/identn (7 commits)

| SHA1 |
|---|
| eac3673e44 |
| fd19ff8e5e |
| 7b9e93162f |
| f106f57097 |
| 5bafdeb373 |
| 24b72084ac |
| 2db83b453d |
.github/workflows/goci.yaml (vendored) — 10 changed lines

```diff
@@ -102,13 +102,3 @@ jobs:
       run: |
         go run cmd/enterprise/*.go generate openapi
         git diff --compact-summary --exit-code || (echo; echo "Unexpected difference in openapi spec. Run go run cmd/enterprise/*.go generate openapi locally and commit."; exit 1)
-      - name: node-install
-        uses: actions/setup-node@v5
-        with:
-          node-version: "22"
-      - name: install-frontend
-        run: cd frontend && yarn install
-      - name: generate-api-clients
-        run: |
-          cd frontend && yarn generate:api
-          git diff --compact-summary --exit-code || (echo; echo "Unexpected difference in generated api clients. Run yarn generate:api in frontend/ locally and commit."; exit 1)
```
.github/workflows/jsci.yaml (vendored) — 51 changed lines

```diff
@@ -52,16 +52,16 @@ jobs:
     with:
       PRIMUS_REF: main
       JS_SRC: frontend
-  md-languages:
+  languages:
     if: |
       github.event_name == 'merge_group' ||
       (github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
       (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
     runs-on: ubuntu-latest
     steps:
-      - name: checkout
+      - name: self-checkout
         uses: actions/checkout@v4
-      - name: validate md languages
+      - name: run
         run: bash frontend/scripts/validate-md-languages.sh
   authz:
     if: |
```
```diff
@@ -70,44 +70,55 @@ jobs:
       (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
     runs-on: ubuntu-latest
     steps:
-      - name: Checkout code
+      - name: self-checkout
         uses: actions/checkout@v5

-      - name: Set up Node.js
+      - name: node-install
         uses: actions/setup-node@v5
         with:
           node-version: "22"

-      - name: Install frontend dependencies
+      - name: deps-install
         working-directory: ./frontend
         run: |
           yarn install

-      - name: Install uv
+      - name: uv-install
         uses: astral-sh/setup-uv@v5

-      - name: Install Python dependencies
+      - name: uv-deps
         working-directory: ./tests/integration
         run: |
           uv sync

-      - name: Start test environment
+      - name: setup-test
         run: |
           make py-test-setup

-      - name: Generate permissions.type.ts
+      - name: generate
         working-directory: ./frontend
         run: |
           yarn generate:permissions-type

-      - name: Teardown test environment
+      - name: teardown-test
         if: always()
         run: |
           make py-test-teardown

-      - name: Check for changes
+      - name: validate
         run: |
           if ! git diff --exit-code frontend/src/hooks/useAuthZ/permissions.type.ts; then
             echo "::error::frontend/src/hooks/useAuthZ/permissions.type.ts is out of date. Please run the generator locally and commit the changes: npm run generate:permissions-type (from the frontend directory)"
             exit 1
           fi
+  openapi:
+    if: |
+      github.event_name == 'merge_group' ||
+      (github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
+      (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
+    runs-on: ubuntu-latest
+    steps:
+      - name: self-checkout
+        uses: actions/checkout@v4
+      - name: node-install
+        uses: actions/setup-node@v5
+        with:
+          node-version: "22"
+      - name: install-frontend
+        run: cd frontend && yarn install
+      - name: generate-api-clients
+        run: |
+          cd frontend && yarn generate:api
+          git diff --compact-summary --exit-code || (echo; echo "Unexpected difference in generated api clients. Run yarn generate:api in frontend/ locally and commit."; exit 1)
```
@@ -308,6 +308,9 @@ user: (3 lines added in this region)

```yaml
    allow_self: true
    # The duration within which a user can reset their password.
    max_token_lifetime: 6h
  invite:
    # The duration within which a user can accept their invite.
    max_token_lifetime: 48h
  root:
    # Whether to enable the root user. When enabled, a root user is provisioned
    # on startup using the email and password below. The root user cannot be
```
```diff
@@ -190,7 +190,7 @@ services:
     #   - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.115.0
+    image: signoz/signoz:v0.116.0
     ports:
       - "8080:8080" # signoz port
       # - "6060:6060" # pprof port

@@ -117,7 +117,7 @@ services:
     #   - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.115.0
+    image: signoz/signoz:v0.116.0
     ports:
       - "8080:8080" # signoz port
     volumes:

@@ -181,7 +181,7 @@ services:
     #   - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.115.0}
+    image: signoz/signoz:${VERSION:-v0.116.0}
     container_name: signoz
     ports:
       - "8080:8080" # signoz port

@@ -109,7 +109,7 @@ services:
     #   - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.115.0}
+    image: signoz/signoz:${VERSION:-v0.116.0}
     container_name: signoz
     ports:
       - "8080:8080" # signoz port
```
@@ -220,6 +220,13 @@ components:
|
||||
- additions
|
||||
- deletions
|
||||
type: object
|
||||
AuthtypesPatchableRole:
|
||||
properties:
|
||||
description:
|
||||
type: string
|
||||
required:
|
||||
- description
|
||||
type: object
|
||||
AuthtypesPostableAuthDomain:
|
||||
properties:
|
||||
config:
|
||||
@@ -236,6 +243,15 @@ components:
|
||||
password:
|
||||
type: string
|
||||
type: object
|
||||
AuthtypesPostableRole:
|
||||
properties:
|
||||
description:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
required:
|
||||
- name
|
||||
type: object
|
||||
AuthtypesPostableRotateToken:
|
||||
properties:
|
||||
refreshToken:
|
||||
@@ -251,6 +267,31 @@ components:
|
||||
- name
|
||||
- type
|
||||
type: object
|
||||
AuthtypesRole:
|
||||
properties:
|
||||
createdAt:
|
||||
format: date-time
|
||||
type: string
|
||||
description:
|
||||
type: string
|
||||
id:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
orgId:
|
||||
type: string
|
||||
type:
|
||||
type: string
|
||||
updatedAt:
|
||||
format: date-time
|
||||
type: string
|
||||
required:
|
||||
- id
|
||||
- name
|
||||
- description
|
||||
- type
|
||||
- orgId
|
||||
type: object
|
||||
AuthtypesRoleMapping:
|
||||
properties:
|
||||
defaultRole:
|
||||
@@ -1722,47 +1763,6 @@ components:
|
||||
- status
|
||||
- error
|
||||
type: object
|
||||
RoletypesPatchableRole:
|
||||
properties:
|
||||
description:
|
||||
type: string
|
||||
required:
|
||||
- description
|
||||
type: object
|
||||
RoletypesPostableRole:
|
||||
properties:
|
||||
description:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
required:
|
||||
- name
|
||||
type: object
|
||||
RoletypesRole:
|
||||
properties:
|
||||
createdAt:
|
||||
format: date-time
|
||||
type: string
|
||||
description:
|
||||
type: string
|
||||
id:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
orgId:
|
||||
type: string
|
||||
type:
|
||||
type: string
|
||||
updatedAt:
|
||||
format: date-time
|
||||
type: string
|
||||
required:
|
||||
- id
|
||||
- name
|
||||
- description
|
||||
- type
|
||||
- orgId
|
||||
type: object
|
||||
ServiceaccounttypesFactorAPIKey:
|
||||
properties:
|
||||
createdAt:
|
||||
@@ -4234,7 +4234,7 @@ paths:
|
||||
properties:
|
||||
data:
|
||||
items:
|
||||
$ref: '#/components/schemas/RoletypesRole'
|
||||
$ref: '#/components/schemas/AuthtypesRole'
|
||||
type: array
|
||||
status:
|
||||
type: string
|
||||
@@ -4277,7 +4277,7 @@ paths:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RoletypesPostableRole'
|
||||
$ref: '#/components/schemas/AuthtypesPostableRole'
|
||||
responses:
|
||||
"201":
|
||||
content:
|
||||
@@ -4422,7 +4422,7 @@ paths:
|
||||
schema:
|
||||
properties:
|
||||
data:
|
||||
$ref: '#/components/schemas/RoletypesRole'
|
||||
$ref: '#/components/schemas/AuthtypesRole'
|
||||
status:
|
||||
type: string
|
||||
required:
|
||||
@@ -4470,7 +4470,7 @@ paths:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RoletypesPatchableRole'
|
||||
$ref: '#/components/schemas/AuthtypesPatchableRole'
|
||||
responses:
|
||||
"204":
|
||||
content:
|
||||
|
||||
```diff
@@ -13,7 +13,6 @@ import (
 	"github.com/SigNoz/signoz/pkg/licensing"
 	"github.com/SigNoz/signoz/pkg/sqlstore"
 	"github.com/SigNoz/signoz/pkg/types/authtypes"
-	"github.com/SigNoz/signoz/pkg/types/roletypes"
 	"github.com/SigNoz/signoz/pkg/valuer"
 	openfgav1 "github.com/openfga/api/proto/openfga/v1"
 	openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
@@ -23,7 +22,7 @@ type provider struct {
 	pkgAuthzService authz.AuthZ
 	openfgaServer   *openfgaserver.Server
 	licensing       licensing.Licensing
-	store           roletypes.Store
+	store           authtypes.RoleStore
 	registry        []authz.RegisterTypeable
 }
```
@@ -82,23 +81,23 @@ func (provider *provider) Write(ctx context.Context, additions []*openfgav1.Tupl
|
||||
return provider.openfgaServer.Write(ctx, additions, deletions)
|
||||
}
|
||||
|
||||
func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
|
||||
func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*authtypes.Role, error) {
|
||||
return provider.pkgAuthzService.Get(ctx, orgID, id)
|
||||
}
|
||||
|
||||
func (provider *provider) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
|
||||
func (provider *provider) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*authtypes.Role, error) {
|
||||
return provider.pkgAuthzService.GetByOrgIDAndName(ctx, orgID, name)
|
||||
}
|
||||
|
||||
func (provider *provider) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
|
||||
func (provider *provider) List(ctx context.Context, orgID valuer.UUID) ([]*authtypes.Role, error) {
|
||||
return provider.pkgAuthzService.List(ctx, orgID)
|
||||
}
|
||||
|
||||
func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
|
||||
func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*authtypes.Role, error) {
|
||||
return provider.pkgAuthzService.ListByOrgIDAndNames(ctx, orgID, names)
|
||||
}
|
||||
|
||||
func (provider *provider) ListByOrgIDAndIDs(ctx context.Context, orgID valuer.UUID, ids []valuer.UUID) ([]*roletypes.Role, error) {
|
||||
func (provider *provider) ListByOrgIDAndIDs(ctx context.Context, orgID valuer.UUID, ids []valuer.UUID) ([]*authtypes.Role, error) {
|
||||
return provider.pkgAuthzService.ListByOrgIDAndIDs(ctx, orgID, ids)
|
||||
}
|
||||
|
||||
@@ -114,7 +113,7 @@ func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, names [
|
||||
return provider.pkgAuthzService.Revoke(ctx, orgID, names, subject)
|
||||
}
|
||||
|
||||
func (provider *provider) CreateManagedRoles(ctx context.Context, orgID valuer.UUID, managedRoles []*roletypes.Role) error {
|
||||
func (provider *provider) CreateManagedRoles(ctx context.Context, orgID valuer.UUID, managedRoles []*authtypes.Role) error {
|
||||
return provider.pkgAuthzService.CreateManagedRoles(ctx, orgID, managedRoles)
|
||||
}
|
||||
|
||||
@@ -136,16 +135,16 @@ func (provider *provider) CreateManagedUserRoleTransactions(ctx context.Context,
|
||||
return provider.Write(ctx, tuples, nil)
|
||||
}
|
||||
|
||||
func (provider *provider) Create(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
|
||||
func (provider *provider) Create(ctx context.Context, orgID valuer.UUID, role *authtypes.Role) error {
|
||||
_, err := provider.licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
return provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
|
||||
return provider.store.Create(ctx, authtypes.NewStorableRoleFromRole(role))
|
||||
}
|
||||
|
||||
func (provider *provider) GetOrCreate(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) (*roletypes.Role, error) {
|
||||
func (provider *provider) GetOrCreate(ctx context.Context, orgID valuer.UUID, role *authtypes.Role) (*authtypes.Role, error) {
|
||||
_, err := provider.licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
@@ -159,10 +158,10 @@ func (provider *provider) GetOrCreate(ctx context.Context, orgID valuer.UUID, ro
|
||||
}
|
||||
|
||||
if existingRole != nil {
|
||||
return roletypes.NewRoleFromStorableRole(existingRole), nil
|
||||
return authtypes.NewRoleFromStorableRole(existingRole), nil
|
||||
}
|
||||
|
||||
err = provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
|
||||
err = provider.store.Create(ctx, authtypes.NewStorableRoleFromRole(role))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -217,13 +216,13 @@ func (provider *provider) GetObjects(ctx context.Context, orgID valuer.UUID, id
|
||||
return objects, nil
|
||||
}
|
||||
|
||||
func (provider *provider) Patch(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
|
||||
func (provider *provider) Patch(ctx context.Context, orgID valuer.UUID, role *authtypes.Role) error {
|
||||
_, err := provider.licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
return provider.store.Update(ctx, orgID, roletypes.NewStorableRoleFromRole(role))
|
||||
return provider.store.Update(ctx, orgID, authtypes.NewStorableRoleFromRole(role))
|
||||
}
|
||||
|
||||
func (provider *provider) PatchObjects(ctx context.Context, orgID valuer.UUID, name string, relation authtypes.Relation, additions, deletions []*authtypes.Object) error {
|
||||
@@ -232,12 +231,12 @@ func (provider *provider) PatchObjects(ctx context.Context, orgID valuer.UUID, n
|
||||
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
additionTuples, err := roletypes.GetAdditionTuples(name, orgID, relation, additions)
|
||||
additionTuples, err := authtypes.GetAdditionTuples(name, orgID, relation, additions)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
deletionTuples, err := roletypes.GetDeletionTuples(name, orgID, relation, deletions)
|
||||
deletionTuples, err := authtypes.GetDeletionTuples(name, orgID, relation, deletions)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -261,7 +260,7 @@ func (provider *provider) Delete(ctx context.Context, orgID valuer.UUID, id valu
|
||||
return err
|
||||
}
|
||||
|
||||
role := roletypes.NewRoleFromStorableRole(storableRole)
|
||||
role := authtypes.NewRoleFromStorableRole(storableRole)
|
||||
err = role.ErrIfManaged()
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -271,7 +270,7 @@ func (provider *provider) Delete(ctx context.Context, orgID valuer.UUID, id valu
|
||||
}
|
||||
|
||||
func (provider *provider) MustGetTypeables() []authtypes.Typeable {
|
||||
return []authtypes.Typeable{authtypes.TypeableRole, roletypes.TypeableResourcesRoles}
|
||||
return []authtypes.Typeable{authtypes.TypeableRole, authtypes.TypeableResourcesRoles}
|
||||
}
|
||||
|
||||
func (provider *provider) getManagedRoleGrantTuples(orgID valuer.UUID, userID valuer.UUID) ([]*openfgav1.TupleKey, error) {
|
||||
@@ -283,7 +282,7 @@ func (provider *provider) getManagedRoleGrantTuples(orgID valuer.UUID, userID va
|
||||
adminSubject,
|
||||
authtypes.RelationAssignee,
|
||||
[]authtypes.Selector{
|
||||
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAdminRoleName),
|
||||
authtypes.MustNewSelector(authtypes.TypeRole, authtypes.SigNozAdminRoleName),
|
||||
},
|
||||
orgID,
|
||||
)
|
||||
@@ -298,7 +297,7 @@ func (provider *provider) getManagedRoleGrantTuples(orgID valuer.UUID, userID va
|
||||
anonymousSubject,
|
||||
authtypes.RelationAssignee,
|
||||
[]authtypes.Selector{
|
||||
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAnonymousRoleName),
|
||||
authtypes.MustNewSelector(authtypes.TypeRole, authtypes.SigNozAnonymousRoleName),
|
||||
},
|
||||
orgID,
|
||||
)
|
||||
|
||||
```diff
@@ -19,7 +19,6 @@ import (
 	"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
 	"github.com/SigNoz/signoz/pkg/types/instrumentationtypes"
 	"github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
-	"github.com/SigNoz/signoz/pkg/types/roletypes"
 	"github.com/SigNoz/signoz/pkg/valuer"
 )

@@ -224,7 +223,7 @@ func (module *module) MustGetTypeables() []authtypes.Typeable {

 func (module *module) MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction {
 	return map[string][]*authtypes.Transaction{
-		roletypes.SigNozAnonymousRoleName: {
+		authtypes.SigNozAnonymousRoleName: {
 			{
 				ID:       valuer.GenerateUUID(),
 				Relation: authtypes.RelationRead,
```
```diff
@@ -80,6 +80,21 @@ func TestManager_TestNotification_SendUnmatched_ThresholdRule(t *testing.T) {
 		alertDataRows := cmock.NewRows(cols, tc.Values)

 		mock := telemetryStore.Mock()
+		// Mock metadata queries for FetchTemporalityAndTypeMulti
+		// First query: fetchMetricsTemporalityAndType (from signoz_metrics time series table)
+		metadataCols := []cmock.ColumnType{
+			{Name: "metric_name", Type: "String"},
+			{Name: "temporality", Type: "String"},
+			{Name: "type", Type: "String"},
+			{Name: "is_monotonic", Type: "Bool"},
+		}
+		metadataRows := cmock.NewRows(metadataCols, [][]any{
+			{"probe_success", metrictypes.Unspecified, metrictypes.GaugeType, false},
+		})
+		mock.ExpectQuery("*distributed_time_series_v4*").WithArgs(nil, nil, nil).WillReturnRows(metadataRows)
+		// Second query: fetchMeterSourceMetricsTemporalityAndType (from signoz_meter table)
+		emptyMetadataRows := cmock.NewRows(metadataCols, [][]any{})
+		mock.ExpectQuery("*meter*").WithArgs(nil).WillReturnRows(emptyMetadataRows)

 		// Generate query arguments for the metric query
 		evalTime := time.Now().UTC()
```
```diff
@@ -11,6 +11,7 @@
     "prettify": "prettier --write .",
     "fmt": "prettier --check .",
     "lint": "eslint ./src",
+    "lint:generated": "eslint ./src/api/generated --fix",
     "lint:fix": "eslint ./src --fix",
     "jest": "jest",
     "jest:coverage": "jest --coverage",
@@ -283,4 +284,4 @@
     "tmp": "0.2.4",
     "vite": "npm:rolldown-vite@7.3.1"
   }
 }
 }
```
```diff
@@ -25,7 +25,7 @@ echo "\n✅ Prettier formatting successful"

 # Fix linting issues
 echo "\n\n---\nRunning eslint...\n"
-if ! yarn lint --fix --quiet src/api/generated; then
+if ! yarn lint:generated; then
   echo "ESLint check failed! Please fix linting errors before proceeding."
   exit 1
 fi
```
@@ -21,6 +21,8 @@ import type { BodyType, ErrorType } from '../../../generatedAPIInstance';
|
||||
import { GeneratedAPIInstance } from '../../../generatedAPIInstance';
|
||||
import type {
|
||||
AuthtypesPatchableObjectsDTO,
|
||||
AuthtypesPatchableRoleDTO,
|
||||
AuthtypesPostableRoleDTO,
|
||||
CreateRole201,
|
||||
DeleteRolePathParameters,
|
||||
GetObjects200,
|
||||
@@ -31,8 +33,6 @@ import type {
|
||||
PatchObjectsPathParameters,
|
||||
PatchRolePathParameters,
|
||||
RenderErrorResponseDTO,
|
||||
RoletypesPatchableRoleDTO,
|
||||
RoletypesPostableRoleDTO,
|
||||
} from '../sigNoz.schemas';
|
||||
|
||||
/**
|
||||
@@ -118,14 +118,14 @@ export const invalidateListRoles = async (
|
||||
* @summary Create role
|
||||
*/
|
||||
export const createRole = (
|
||||
roletypesPostableRoleDTO: BodyType<RoletypesPostableRoleDTO>,
|
||||
authtypesPostableRoleDTO: BodyType<AuthtypesPostableRoleDTO>,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<CreateRole201>({
|
||||
url: `/api/v1/roles`,
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
data: roletypesPostableRoleDTO,
|
||||
data: authtypesPostableRoleDTO,
|
||||
signal,
|
||||
});
|
||||
};
|
||||
@@ -137,13 +137,13 @@ export const getCreateRoleMutationOptions = <
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof createRole>>,
|
||||
TError,
|
||||
{ data: BodyType<RoletypesPostableRoleDTO> },
|
||||
{ data: BodyType<AuthtypesPostableRoleDTO> },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof createRole>>,
|
||||
TError,
|
||||
{ data: BodyType<RoletypesPostableRoleDTO> },
|
||||
{ data: BodyType<AuthtypesPostableRoleDTO> },
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['createRole'];
|
||||
@@ -157,7 +157,7 @@ export const getCreateRoleMutationOptions = <
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof createRole>>,
|
||||
{ data: BodyType<RoletypesPostableRoleDTO> }
|
||||
{ data: BodyType<AuthtypesPostableRoleDTO> }
|
||||
> = (props) => {
|
||||
const { data } = props ?? {};
|
||||
|
||||
@@ -170,7 +170,7 @@ export const getCreateRoleMutationOptions = <
|
||||
export type CreateRoleMutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof createRole>>
|
||||
>;
|
||||
export type CreateRoleMutationBody = BodyType<RoletypesPostableRoleDTO>;
|
||||
export type CreateRoleMutationBody = BodyType<AuthtypesPostableRoleDTO>;
|
||||
export type CreateRoleMutationError = ErrorType<RenderErrorResponseDTO>;
|
||||
|
||||
/**
|
||||
@@ -183,13 +183,13 @@ export const useCreateRole = <
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof createRole>>,
|
||||
TError,
|
||||
{ data: BodyType<RoletypesPostableRoleDTO> },
|
||||
{ data: BodyType<AuthtypesPostableRoleDTO> },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof createRole>>,
|
||||
TError,
|
||||
{ data: BodyType<RoletypesPostableRoleDTO> },
|
||||
{ data: BodyType<AuthtypesPostableRoleDTO> },
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getCreateRoleMutationOptions(options);
|
||||
@@ -370,13 +370,13 @@ export const invalidateGetRole = async (
|
||||
*/
|
||||
export const patchRole = (
|
||||
{ id }: PatchRolePathParameters,
|
||||
roletypesPatchableRoleDTO: BodyType<RoletypesPatchableRoleDTO>,
|
||||
authtypesPatchableRoleDTO: BodyType<AuthtypesPatchableRoleDTO>,
|
||||
) => {
|
||||
return GeneratedAPIInstance<string>({
|
||||
url: `/api/v1/roles/${id}`,
|
||||
method: 'PATCH',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
data: roletypesPatchableRoleDTO,
|
||||
data: authtypesPatchableRoleDTO,
|
||||
});
|
||||
};
|
||||
|
||||
@@ -389,7 +389,7 @@ export const getPatchRoleMutationOptions = <
|
||||
TError,
|
||||
{
|
||||
pathParams: PatchRolePathParameters;
|
||||
data: BodyType<RoletypesPatchableRoleDTO>;
|
||||
data: BodyType<AuthtypesPatchableRoleDTO>;
|
||||
},
|
||||
TContext
|
||||
>;
|
||||
@@ -398,7 +398,7 @@ export const getPatchRoleMutationOptions = <
|
||||
TError,
|
||||
{
|
||||
pathParams: PatchRolePathParameters;
|
||||
data: BodyType<RoletypesPatchableRoleDTO>;
|
||||
data: BodyType<AuthtypesPatchableRoleDTO>;
|
||||
},
|
||||
TContext
|
||||
> => {
|
||||
@@ -415,7 +415,7 @@ export const getPatchRoleMutationOptions = <
|
||||
Awaited<ReturnType<typeof patchRole>>,
|
||||
{
|
||||
pathParams: PatchRolePathParameters;
|
||||
data: BodyType<RoletypesPatchableRoleDTO>;
|
||||
data: BodyType<AuthtypesPatchableRoleDTO>;
|
||||
}
|
||||
> = (props) => {
|
||||
const { pathParams, data } = props ?? {};
|
||||
@@ -429,7 +429,7 @@ export const getPatchRoleMutationOptions = <
|
||||
export type PatchRoleMutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof patchRole>>
|
||||
>;
|
||||
export type PatchRoleMutationBody = BodyType<RoletypesPatchableRoleDTO>;
|
||||
export type PatchRoleMutationBody = BodyType<AuthtypesPatchableRoleDTO>;
|
||||
export type PatchRoleMutationError = ErrorType<RenderErrorResponseDTO>;
|
||||
|
||||
/**
|
||||
@@ -444,7 +444,7 @@ export const usePatchRole = <
|
||||
TError,
|
||||
{
|
||||
pathParams: PatchRolePathParameters;
|
||||
data: BodyType<RoletypesPatchableRoleDTO>;
|
||||
data: BodyType<AuthtypesPatchableRoleDTO>;
|
||||
},
|
||||
TContext
|
||||
>;
|
||||
@@ -453,7 +453,7 @@ export const usePatchRole = <
|
||||
TError,
|
||||
{
|
||||
pathParams: PatchRolePathParameters;
|
||||
data: BodyType<RoletypesPatchableRoleDTO>;
|
||||
data: BodyType<AuthtypesPatchableRoleDTO>;
|
||||
},
|
||||
TContext
|
||||
> => {
|
||||
|
||||
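The regenerated roles client now threads the Authtypes* DTOs through the create/patch hooks end to end. A minimal consumption sketch follows; the import paths and the role values are illustrative assumptions, not part of this change — only the hook name, the `{ data }` mutation shape, and the `AuthtypesPostableRoleDTO` fields (`name` required, `description` optional) come from the generated code above.

```tsx
// Hypothetical import paths; the real generated modules live under the frontend's generated API folder.
import { useCreateRole } from 'api/generated/services/roles/roles';
import type { AuthtypesPostableRoleDTO } from 'api/generated/services/sigNoz.schemas';

function CreateRoleButton(): JSX.Element {
	const createRole = useCreateRole();

	const onClick = (): void => {
		// AuthtypesPostableRoleDTO requires `name`; `description` is optional.
		const data: AuthtypesPostableRoleDTO = {
			name: 'support-readonly',
			description: 'Read-only access for the support team',
		};
		createRole.mutate({ data });
	};

	return (
		<button type="button" onClick={onClick}>
			Create role
		</button>
	);
}
```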
@@ -278,6 +278,13 @@ export interface AuthtypesPatchableObjectsDTO {
|
||||
deletions: AuthtypesGettableObjectsDTO[] | null;
|
||||
}
|
||||
|
||||
export interface AuthtypesPatchableRoleDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
description: string;
|
||||
}
|
||||
|
||||
export interface AuthtypesPostableAuthDomainDTO {
|
||||
config?: AuthtypesAuthDomainConfigDTO;
|
||||
/**
|
||||
@@ -301,6 +308,17 @@ export interface AuthtypesPostableEmailPasswordSessionDTO {
|
||||
password?: string;
|
||||
}
|
||||
|
||||
export interface AuthtypesPostableRoleDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
description?: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
name: string;
|
||||
}
|
||||
|
||||
export interface AuthtypesPostableRotateTokenDTO {
|
||||
/**
|
||||
* @type string
|
||||
@@ -319,6 +337,39 @@ export interface AuthtypesResourceDTO {
|
||||
type: string;
|
||||
}
|
||||
|
||||
export interface AuthtypesRoleDTO {
|
||||
/**
|
||||
* @type string
|
||||
* @format date-time
|
||||
*/
|
||||
createdAt?: Date;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
description: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
id: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
orgId: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
type: string;
|
||||
/**
|
||||
* @type string
|
||||
* @format date-time
|
||||
*/
|
||||
updatedAt?: Date;
|
||||
}
|
||||
|
||||
/**
|
||||
* @nullable
|
||||
*/
|
||||
@@ -2039,57 +2090,6 @@ export interface RenderErrorResponseDTO {
|
||||
status: string;
|
||||
}
|
||||
|
||||
export interface RoletypesPatchableRoleDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
description: string;
|
||||
}
|
||||
|
||||
export interface RoletypesPostableRoleDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
description?: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
name: string;
|
||||
}
|
||||
|
||||
export interface RoletypesRoleDTO {
|
||||
/**
|
||||
* @type string
|
||||
* @format date-time
|
||||
*/
|
||||
createdAt?: Date;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
description: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
id: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
orgId: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
type: string;
|
||||
/**
|
||||
* @type string
|
||||
* @format date-time
|
||||
*/
|
||||
updatedAt?: Date;
|
||||
}
|
||||
|
||||
export interface ServiceaccounttypesFactorAPIKeyDTO {
|
||||
/**
|
||||
* @type string
|
||||
@@ -3163,7 +3163,7 @@ export type ListRoles200 = {
|
||||
/**
|
||||
* @type array
|
||||
*/
|
||||
data: RoletypesRoleDTO[];
|
||||
data: AuthtypesRoleDTO[];
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
@@ -3185,7 +3185,7 @@ export type GetRolePathParameters = {
|
||||
id: string;
|
||||
};
|
||||
export type GetRole200 = {
|
||||
data: RoletypesRoleDTO;
|
||||
data: AuthtypesRoleDTO;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
|
||||
```diff
@@ -8,32 +8,42 @@ export const downloadExportData = async (
 	props: ExportRawDataProps,
 ): Promise<void> => {
 	try {
-		const response = await axios.post<Blob>(
-			`export_raw_data?format=${encodeURIComponent(props.format)}`,
-			props.body,
-			{
-				responseType: 'blob',
-				decompress: true,
-				headers: {
-					Accept: 'application/octet-stream',
-					'Content-Type': 'application/json',
-				},
-				timeout: 0,
-			},
-		);
+		const queryParams = new URLSearchParams();
+
+		queryParams.append('start', String(props.start));
+		queryParams.append('end', String(props.end));
+		queryParams.append('filter', props.filter);
+		props.columns.forEach((col) => {
+			queryParams.append('columns', col);
+		});
+		queryParams.append('order_by', props.orderBy);
+		queryParams.append('limit', String(props.limit));
+		queryParams.append('format', props.format);
+
+		const response = await axios.get<Blob>(`export_raw_data?${queryParams}`, {
+			responseType: 'blob', // Important: tell axios to handle response as blob
+			decompress: true, // Enable automatic decompression
+			headers: {
+				Accept: 'application/octet-stream', // Tell server we expect binary data
+			},
+			timeout: 0,
+		});

 		// Only proceed if the response status is 200
 		if (response.status !== 200) {
 			throw new Error(
 				`Failed to download data: server returned status ${response.status}`,
 			);
 		}

 		// Create blob URL from response data
 		const blob = new Blob([response.data], { type: 'application/octet-stream' });
 		const url = window.URL.createObjectURL(blob);

 		// Create and configure download link
 		const link = document.createElement('a');
 		link.href = url;

 		// Get filename from Content-Disposition header or generate timestamped default
 		const filename =
 			response.headers['content-disposition']
 				?.split('filename=')[1]
@@ -41,6 +51,7 @@ export const downloadExportData = async (

 		link.setAttribute('download', filename);

 		// Trigger download
 		document.body.appendChild(link);
 		link.click();
 		link.remove();
```
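For reference, a minimal caller sketch for the GET-based export shape restored above. The props fields mirror exactly what the function reads (`start`, `end`, `filter`, `columns`, `orderBy`, `limit`, `format`); the module path is taken from the test mock later in this diff, and the literal values are placeholders, not part of this change.

```ts
import { downloadExportData } from 'api/v1/download/downloadExportData';

// Export the last hour of matching rows as CSV (placeholder filter, columns, and limits).
async function exportErrorLogs(): Promise<void> {
	const end = Date.now();
	const start = end - 60 * 60 * 1000;

	await downloadExportData({
		start,
		end,
		filter: "severity_text = 'ERROR'",
		columns: ['attribute.http.status:int64'],
		orderBy: 'timestamp:desc',
		limit: 10000,
		format: 'csv',
	});
}
```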
@@ -1,323 +0,0 @@
|
||||
// eslint-disable-next-line no-restricted-imports
|
||||
import { Provider } from 'react-redux';
|
||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react';
|
||||
import { message } from 'antd';
|
||||
import configureStore from 'redux-mock-store';
|
||||
import store from 'store';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { EQueryType } from 'types/common/dashboard';
|
||||
import { DataSource, StringOperators } from 'types/common/queryBuilder';
|
||||
|
||||
import '@testing-library/jest-dom';
|
||||
|
||||
import { DownloadFormats, DownloadRowCounts } from './constants';
|
||||
import DownloadOptionsMenu from './DownloadOptionsMenu';
|
||||
|
||||
const mockDownloadExportData = jest.fn().mockResolvedValue(undefined);
|
||||
jest.mock('api/v1/download/downloadExportData', () => ({
|
||||
downloadExportData: (...args: any[]): any => mockDownloadExportData(...args),
|
||||
default: (...args: any[]): any => mockDownloadExportData(...args),
|
||||
}));
|
||||
|
||||
jest.mock('antd', () => {
|
||||
const actual = jest.requireActual('antd');
|
||||
return {
|
||||
...actual,
|
||||
message: {
|
||||
success: jest.fn(),
|
||||
error: jest.fn(),
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
const mockStore = configureStore([]);
|
||||
const createMockReduxStore = (): any =>
|
||||
mockStore({
|
||||
...store.getState(),
|
||||
});
|
||||
|
||||
const createMockStagedQuery = (dataSource: DataSource): Query => ({
|
||||
id: 'test-query-id',
|
||||
queryType: EQueryType.QUERY_BUILDER,
|
||||
builder: {
|
||||
queryData: [
|
||||
{
|
||||
queryName: 'A',
|
||||
dataSource,
|
||||
aggregateOperator: StringOperators.NOOP,
|
||||
aggregateAttribute: {
|
||||
id: '',
|
||||
dataType: '' as any,
|
||||
key: '',
|
||||
type: '',
|
||||
},
|
||||
aggregations: [{ expression: 'count()' }],
|
||||
functions: [],
|
||||
filter: { expression: 'status = 200' },
|
||||
filters: { items: [], op: 'AND' },
|
||||
groupBy: [],
|
||||
expression: 'A',
|
||||
disabled: false,
|
||||
having: { expression: '' } as any,
|
||||
limit: null,
|
||||
stepInterval: null,
|
||||
orderBy: [{ columnName: 'timestamp', order: 'desc' }],
|
||||
legend: '',
|
||||
selectColumns: [],
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
promql: [],
|
||||
clickhouse_sql: [],
|
||||
});
|
||||
|
||||
const renderWithStore = (
|
||||
stagedQuery: Query | null,
|
||||
dataSource: DataSource,
|
||||
): void => {
|
||||
const mockReduxStore = createMockReduxStore();
|
||||
render(
|
||||
<Provider store={mockReduxStore}>
|
||||
<DownloadOptionsMenu stagedQuery={stagedQuery} dataSource={dataSource} />
|
||||
</Provider>,
|
||||
);
|
||||
};
|
||||
|
||||
describe.each([
|
||||
[DataSource.LOGS, 'logs'],
|
||||
[DataSource.TRACES, 'traces'],
|
||||
])('DownloadOptionsMenu for %s', (dataSource, signal) => {
|
||||
const testId = `periscope-btn-download-${dataSource}`;
|
||||
|
||||
beforeEach(() => {
|
||||
mockDownloadExportData.mockReset().mockResolvedValue(undefined);
|
||||
(message.success as jest.Mock).mockReset();
|
||||
(message.error as jest.Mock).mockReset();
|
||||
});
|
||||
|
||||
it('renders download button', () => {
|
||||
renderWithStore(createMockStagedQuery(dataSource), dataSource);
|
||||
const button = screen.getByTestId(testId);
|
||||
expect(button).toBeInTheDocument();
|
||||
expect(button).toHaveClass('periscope-btn', 'ghost');
|
||||
});
|
||||
|
||||
it('shows popover with export options when download button is clicked', () => {
|
||||
renderWithStore(createMockStagedQuery(dataSource), dataSource);
|
||||
fireEvent.click(screen.getByTestId(testId));
|
||||
|
||||
expect(screen.getByRole('dialog')).toBeInTheDocument();
|
||||
expect(screen.getByText('FORMAT')).toBeInTheDocument();
|
||||
expect(screen.getByText('Number of Rows')).toBeInTheDocument();
|
||||
expect(screen.getByText('Columns')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('allows changing export format', () => {
|
||||
renderWithStore(createMockStagedQuery(dataSource), dataSource);
|
||||
fireEvent.click(screen.getByTestId(testId));
|
||||
|
||||
const csvRadio = screen.getByRole('radio', { name: 'csv' });
|
||||
const jsonlRadio = screen.getByRole('radio', { name: 'jsonl' });
|
||||
|
||||
expect(csvRadio).toBeChecked();
|
||||
fireEvent.click(jsonlRadio);
|
||||
expect(jsonlRadio).toBeChecked();
|
||||
expect(csvRadio).not.toBeChecked();
|
||||
});
|
||||
|
||||
it('allows changing row limit', () => {
|
||||
renderWithStore(createMockStagedQuery(dataSource), dataSource);
|
||||
fireEvent.click(screen.getByTestId(testId));
|
||||
|
||||
const tenKRadio = screen.getByRole('radio', { name: '10k' });
|
||||
const fiftyKRadio = screen.getByRole('radio', { name: '50k' });
|
||||
|
||||
expect(tenKRadio).toBeChecked();
|
||||
fireEvent.click(fiftyKRadio);
|
||||
expect(fiftyKRadio).toBeChecked();
|
||||
expect(tenKRadio).not.toBeChecked();
|
||||
});
|
||||
|
||||
it('allows changing columns scope', () => {
|
||||
renderWithStore(createMockStagedQuery(dataSource), dataSource);
|
||||
fireEvent.click(screen.getByTestId(testId));
|
||||
|
||||
const allColumnsRadio = screen.getByRole('radio', { name: 'All' });
|
||||
const selectedColumnsRadio = screen.getByRole('radio', { name: 'Selected' });
|
||||
|
||||
expect(allColumnsRadio).toBeChecked();
|
||||
fireEvent.click(selectedColumnsRadio);
|
||||
expect(selectedColumnsRadio).toBeChecked();
|
||||
expect(allColumnsRadio).not.toBeChecked();
|
||||
});
|
||||
|
||||
it('calls downloadExportData with correct format and POST body', async () => {
|
||||
renderWithStore(createMockStagedQuery(dataSource), dataSource);
|
||||
fireEvent.click(screen.getByTestId(testId));
|
||||
fireEvent.click(screen.getByText('Export'));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockDownloadExportData).toHaveBeenCalledTimes(1);
|
||||
const callArgs = mockDownloadExportData.mock.calls[0][0];
|
||||
expect(callArgs.format).toBe(DownloadFormats.CSV);
|
||||
expect(callArgs.body).toBeDefined();
|
||||
expect(callArgs.body.requestType).toBe('raw');
|
||||
expect(callArgs.body.compositeQuery.queries).toHaveLength(1);
|
||||
|
||||
const query = callArgs.body.compositeQuery.queries[0];
|
||||
expect(query.type).toBe('builder_query');
|
||||
expect(query.spec.signal).toBe(signal);
|
||||
expect(query.spec.limit).toBe(DownloadRowCounts.TEN_K);
|
||||
});
|
||||
});
|
||||
|
||||
it('clears groupBy and having in the export payload', async () => {
|
||||
const mockQuery = createMockStagedQuery(dataSource);
|
||||
mockQuery.builder.queryData[0].groupBy = [
|
||||
{ key: 'service', dataType: 'string' as any, type: '' },
|
||||
];
|
||||
mockQuery.builder.queryData[0].having = {
|
||||
expression: 'count() > 10',
|
||||
} as any;
|
||||
|
||||
renderWithStore(mockQuery, dataSource);
|
||||
fireEvent.click(screen.getByTestId(testId));
|
||||
fireEvent.click(screen.getByText('Export'));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockDownloadExportData).toHaveBeenCalledTimes(1);
|
||||
const callArgs = mockDownloadExportData.mock.calls[0][0];
|
||||
const query = callArgs.body.compositeQuery.queries[0];
|
||||
expect(query.spec.groupBy).toBeUndefined();
|
||||
expect(query.spec.having).toEqual({ expression: '' });
|
||||
});
|
||||
});
|
||||
|
||||
it('keeps selectColumns when column scope is Selected', async () => {
|
||||
const mockQuery = createMockStagedQuery(dataSource);
|
||||
mockQuery.builder.queryData[0].selectColumns = [
|
||||
{ name: 'http.status', fieldDataType: 'int64', fieldContext: 'attribute' },
|
||||
] as any;
|
||||
|
||||
renderWithStore(mockQuery, dataSource);
|
||||
fireEvent.click(screen.getByTestId(testId));
|
||||
fireEvent.click(screen.getByRole('radio', { name: 'Selected' }));
|
||||
fireEvent.click(screen.getByText('Export'));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockDownloadExportData).toHaveBeenCalledTimes(1);
|
||||
const callArgs = mockDownloadExportData.mock.calls[0][0];
|
||||
const query = callArgs.body.compositeQuery.queries[0];
|
||||
expect(query.spec.selectFields).toEqual([
|
||||
expect.objectContaining({
|
||||
name: 'http.status',
|
||||
fieldDataType: 'int64',
|
||||
}),
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
it('sends no selectFields when column scope is All', async () => {
|
||||
renderWithStore(createMockStagedQuery(dataSource), dataSource);
|
||||
fireEvent.click(screen.getByTestId(testId));
|
||||
fireEvent.click(screen.getByRole('radio', { name: 'All' }));
|
||||
fireEvent.click(screen.getByText('Export'));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockDownloadExportData).toHaveBeenCalledTimes(1);
|
||||
const callArgs = mockDownloadExportData.mock.calls[0][0];
|
||||
const query = callArgs.body.compositeQuery.queries[0];
|
||||
expect(query.spec.selectFields).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
it('handles successful export with success message', async () => {
|
||||
renderWithStore(createMockStagedQuery(dataSource), dataSource);
|
||||
fireEvent.click(screen.getByTestId(testId));
|
||||
fireEvent.click(screen.getByText('Export'));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(message.success).toHaveBeenCalledWith(
|
||||
'Export completed successfully',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('handles export failure with error message', async () => {
|
||||
mockDownloadExportData.mockRejectedValueOnce(new Error('Server error'));
|
||||
renderWithStore(createMockStagedQuery(dataSource), dataSource);
|
||||
fireEvent.click(screen.getByTestId(testId));
|
||||
fireEvent.click(screen.getByText('Export'));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(message.error).toHaveBeenCalledWith(
|
||||
`Failed to export ${dataSource}. Please try again.`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('handles UI state correctly during export process', async () => {
|
||||
let resolveDownload: () => void;
|
||||
mockDownloadExportData.mockImplementationOnce(
|
||||
() =>
|
||||
new Promise<void>((resolve) => {
|
||||
resolveDownload = resolve;
|
||||
}),
|
||||
);
|
||||
renderWithStore(createMockStagedQuery(dataSource), dataSource);
|
||||
|
||||
fireEvent.click(screen.getByTestId(testId));
|
||||
expect(screen.getByRole('dialog')).toBeInTheDocument();
|
||||
|
||||
fireEvent.click(screen.getByText('Export'));
|
||||
|
||||
expect(screen.getByTestId(testId)).toBeDisabled();
|
||||
expect(screen.queryByRole('dialog')).not.toBeInTheDocument();
|
||||
|
||||
resolveDownload!();
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId(testId)).not.toBeDisabled();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('DownloadOptionsMenu for traces with queryTraceOperator', () => {
|
||||
const dataSource = DataSource.TRACES;
|
||||
const testId = `periscope-btn-download-${dataSource}`;
|
||||
|
||||
beforeEach(() => {
|
||||
mockDownloadExportData.mockReset().mockResolvedValue(undefined);
|
||||
(message.success as jest.Mock).mockReset();
|
||||
});
|
||||
|
||||
it('applies limit and clears groupBy on queryTraceOperator entries', async () => {
|
||||
const query = createMockStagedQuery(dataSource);
|
||||
query.builder.queryTraceOperator = [
|
||||
{
|
||||
...query.builder.queryData[0],
|
||||
queryName: 'TraceOp1',
|
||||
expression: 'TraceOp1',
|
||||
groupBy: [{ key: 'service', dataType: 'string' as any, type: '' }],
|
||||
},
|
||||
];
|
||||
|
||||
renderWithStore(query, dataSource);
|
||||
fireEvent.click(screen.getByTestId(testId));
|
||||
fireEvent.click(screen.getByRole('radio', { name: '50k' }));
|
||||
fireEvent.click(screen.getByText('Export'));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockDownloadExportData).toHaveBeenCalledTimes(1);
|
||||
const callArgs = mockDownloadExportData.mock.calls[0][0];
|
||||
const queries = callArgs.body.compositeQuery.queries;
|
||||
const traceOpQuery = queries.find((q: any) => q.spec.name === 'TraceOp1');
|
||||
if (traceOpQuery) {
|
||||
expect(traceOpQuery.spec.limit).toBe(DownloadRowCounts.FIFTY_K);
|
||||
expect(traceOpQuery.spec.groupBy).toBeUndefined();
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,189 +0,0 @@
|
||||
import { useCallback, useMemo, useState } from 'react';
|
||||
// eslint-disable-next-line no-restricted-imports
|
||||
import { useSelector } from 'react-redux';
|
||||
import { Button, message, Popover, Radio, Tooltip, Typography } from 'antd';
|
||||
import { downloadExportData } from 'api/v1/download/downloadExportData';
|
||||
import { prepareQueryRangePayloadV5 } from 'api/v5/v5';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { Download, DownloadIcon, Loader2 } from 'lucide-react';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
import {
|
||||
DownloadColumnsScopes,
|
||||
DownloadFormats,
|
||||
DownloadRowCounts,
|
||||
} from './constants';
|
||||
|
||||
import './DownloadOptionsMenu.styles.scss';
|
||||
|
||||
interface DownloadOptionsMenuProps {
|
||||
stagedQuery: Query | null;
|
||||
dataSource: DataSource;
|
||||
}
|
||||
|
||||
export default function DownloadOptionsMenu({
|
||||
stagedQuery,
|
||||
dataSource,
|
||||
}: DownloadOptionsMenuProps): JSX.Element {
|
||||
const [exportFormat, setExportFormat] = useState<string>(DownloadFormats.CSV);
|
||||
const [rowLimit, setRowLimit] = useState<number>(DownloadRowCounts.TEN_K);
|
||||
const [columnsScope, setColumnsScope] = useState<string>(
|
||||
DownloadColumnsScopes.ALL,
|
||||
);
|
||||
const [isDownloading, setIsDownloading] = useState<boolean>(false);
|
||||
const [isPopoverOpen, setIsPopoverOpen] = useState<boolean>(false);
|
||||
|
||||
const { selectedTime: globalSelectedInterval } = useSelector<
|
||||
AppState,
|
||||
GlobalReducer
|
||||
>((state) => state.globalTime);
|
||||
|
||||
const handleExportRawData = useCallback(async (): Promise<void> => {
|
||||
setIsPopoverOpen(false);
|
||||
if (!stagedQuery) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
setIsDownloading(true);
|
||||
|
||||
const clearSelectColumns = columnsScope === DownloadColumnsScopes.ALL;
|
||||
|
||||
const exportQuery: Query = {
|
||||
...stagedQuery,
|
||||
builder: {
|
||||
...stagedQuery.builder,
|
||||
queryData: stagedQuery.builder.queryData.map((qd) => ({
|
||||
...qd,
|
||||
groupBy: [],
|
||||
having: { expression: '' },
|
||||
limit: rowLimit,
|
||||
...(clearSelectColumns && { selectColumns: [] }),
|
||||
})),
|
||||
queryTraceOperator: (stagedQuery.builder.queryTraceOperator || []).map(
|
||||
(traceOp) => ({
|
||||
...traceOp,
|
||||
groupBy: [],
|
||||
having: { expression: '' },
|
||||
limit: rowLimit,
|
||||
...(clearSelectColumns && { selectColumns: [] }),
|
||||
}),
|
||||
),
|
||||
},
|
||||
};
|
||||
|
||||
const { queryPayload } = prepareQueryRangePayloadV5({
|
||||
query: exportQuery,
|
||||
graphType: PANEL_TYPES.LIST,
|
||||
selectedTime: 'GLOBAL_TIME',
|
||||
globalSelectedInterval,
|
||||
});
|
||||
|
||||
await downloadExportData({ format: exportFormat, body: queryPayload });
|
||||
message.success('Export completed successfully');
|
||||
} catch (error) {
|
||||
console.error(`Error exporting ${dataSource}:`, error);
|
||||
message.error(`Failed to export ${dataSource}. Please try again.`);
|
||||
} finally {
|
||||
setIsDownloading(false);
|
||||
}
|
||||
}, [
|
||||
stagedQuery,
|
||||
columnsScope,
|
||||
exportFormat,
|
||||
rowLimit,
|
||||
globalSelectedInterval,
|
||||
dataSource,
|
||||
]);
|
||||
|
||||
const popoverContent = useMemo(
|
||||
() => (
|
||||
<div
|
||||
className="export-options-container"
|
||||
role="dialog"
|
||||
aria-label="Export options"
|
||||
aria-modal="true"
|
||||
>
|
||||
<div className="export-format">
|
||||
<Typography.Text className="title">FORMAT</Typography.Text>
|
||||
<Radio.Group
|
||||
value={exportFormat}
|
||||
onChange={(e): void => setExportFormat(e.target.value)}
|
||||
>
|
||||
<Radio value={DownloadFormats.CSV}>csv</Radio>
|
||||
<Radio value={DownloadFormats.JSONL}>jsonl</Radio>
|
||||
</Radio.Group>
|
||||
</div>
|
||||
|
||||
<div className="horizontal-line" />
|
||||
|
||||
<div className="row-limit">
|
||||
<Typography.Text className="title">Number of Rows</Typography.Text>
|
||||
<Radio.Group
|
||||
value={rowLimit}
|
||||
onChange={(e): void => setRowLimit(e.target.value)}
|
||||
>
|
||||
<Radio value={DownloadRowCounts.TEN_K}>10k</Radio>
|
||||
<Radio value={DownloadRowCounts.THIRTY_K}>30k</Radio>
|
||||
<Radio value={DownloadRowCounts.FIFTY_K}>50k</Radio>
|
||||
</Radio.Group>
|
||||
</div>
|
||||
|
||||
<div className="horizontal-line" />
|
||||
|
||||
<div className="columns-scope">
|
||||
<Typography.Text className="title">Columns</Typography.Text>
|
||||
<Radio.Group
|
||||
value={columnsScope}
|
||||
onChange={(e): void => setColumnsScope(e.target.value)}
|
||||
>
|
||||
<Radio value={DownloadColumnsScopes.ALL}>All</Radio>
|
||||
<Radio value={DownloadColumnsScopes.SELECTED}>Selected</Radio>
|
||||
</Radio.Group>
|
||||
</div>
|
||||
|
||||
<Button
|
||||
type="primary"
|
||||
icon={<Download size={16} />}
|
||||
onClick={handleExportRawData}
|
||||
className="export-button"
|
||||
disabled={isDownloading}
|
||||
loading={isDownloading}
|
||||
>
|
||||
Export
|
||||
</Button>
|
||||
</div>
|
||||
),
|
||||
[exportFormat, rowLimit, columnsScope, isDownloading, handleExportRawData],
|
||||
);
|
||||
|
||||
return (
|
||||
<Popover
|
||||
content={popoverContent}
|
||||
trigger="click"
|
||||
placement="bottomRight"
|
||||
arrow={false}
|
||||
open={isPopoverOpen}
|
||||
onOpenChange={setIsPopoverOpen}
|
||||
rootClassName="download-popover"
|
||||
>
|
||||
<Tooltip title="Download" placement="top">
|
||||
<Button
|
||||
className="periscope-btn ghost"
|
||||
icon={
|
||||
isDownloading ? (
|
||||
<Loader2 size={14} className="animate-spin" />
|
||||
) : (
|
||||
<DownloadIcon size={14} />
|
||||
)
|
||||
}
|
||||
data-testid={`periscope-btn-download-${dataSource}`}
|
||||
disabled={isDownloading}
|
||||
/>
|
||||
</Tooltip>
|
||||
</Popover>
|
||||
);
|
||||
}
|
||||
```diff
@@ -1,4 +1,4 @@
-.download-popover {
+.logs-download-popover {
   .ant-popover-inner {
     border-radius: 4px;
     border: 1px solid var(--bg-slate-400);
@@ -59,7 +59,7 @@
 }

 .lightMode {
-  .download-popover {
+  .logs-download-popover {
     .ant-popover-inner {
       border: 1px solid var(--bg-vanilla-300);
       background: linear-gradient(
```
@@ -0,0 +1,341 @@
|
||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react';
|
||||
import { message } from 'antd';
|
||||
import { ENVIRONMENT } from 'constants/env';
|
||||
import { server } from 'mocks-server/server';
|
||||
import { rest } from 'msw';
|
||||
import { TelemetryFieldKey } from 'types/api/v5/queryRange';
|
||||
|
||||
import '@testing-library/jest-dom';
|
||||
|
||||
import { DownloadFormats, DownloadRowCounts } from './constants';
|
||||
import LogsDownloadOptionsMenu from './LogsDownloadOptionsMenu';
|
||||
|
||||
// Mock antd message
|
||||
jest.mock('antd', () => {
|
||||
const actual = jest.requireActual('antd');
|
||||
return {
|
||||
...actual,
|
||||
message: {
|
||||
success: jest.fn(),
|
||||
error: jest.fn(),
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
const TEST_IDS = {
|
||||
DOWNLOAD_BUTTON: 'periscope-btn-download-options',
|
||||
} as const;
|
||||
|
||||
interface TestProps {
|
||||
startTime: number;
|
||||
endTime: number;
|
||||
filter: string;
|
||||
columns: TelemetryFieldKey[];
|
||||
orderBy: string;
|
||||
}
|
||||
|
||||
const createTestProps = (): TestProps => ({
|
||||
startTime: 1631234567890,
|
||||
endTime: 1631234567999,
|
||||
filter: 'status = 200',
|
||||
columns: [
|
||||
{
|
||||
name: 'http.status',
|
||||
fieldContext: 'attribute',
|
||||
fieldDataType: 'int64',
|
||||
} as TelemetryFieldKey,
|
||||
],
|
||||
orderBy: 'timestamp:desc',
|
||||
});
|
||||
|
||||
const testRenderContent = (props: TestProps): void => {
|
||||
render(
|
||||
<LogsDownloadOptionsMenu
|
||||
startTime={props.startTime}
|
||||
endTime={props.endTime}
|
||||
filter={props.filter}
|
||||
columns={props.columns}
|
||||
orderBy={props.orderBy}
|
||||
/>,
|
||||
);
|
||||
};
|
||||
|
||||
const testSuccessResponse = (res: any, ctx: any): any =>
|
||||
res(
|
||||
ctx.status(200),
|
||||
ctx.set('Content-Type', 'application/octet-stream'),
|
||||
ctx.set('Content-Disposition', 'attachment; filename="export.csv"'),
|
||||
ctx.body('id,value\n1,2\n'),
|
||||
);
|
||||
|
||||
describe('LogsDownloadOptionsMenu', () => {
|
||||
const BASE_URL = ENVIRONMENT.baseURL;
|
||||
const EXPORT_URL = `${BASE_URL}/api/v1/export_raw_data`;
|
||||
let requestSpy: jest.Mock<any, any>;
|
||||
const setupDefaultServer = (): void => {
|
||||
server.use(
|
||||
rest.get(EXPORT_URL, (req, res, ctx) => {
|
||||
const params = req.url.searchParams;
|
||||
const payload = {
|
||||
start: Number(params.get('start')),
|
||||
end: Number(params.get('end')),
|
||||
filter: params.get('filter'),
|
||||
columns: params.getAll('columns'),
|
||||
order_by: params.get('order_by'),
|
||||
limit: Number(params.get('limit')),
|
||||
format: params.get('format'),
|
||||
};
|
||||
requestSpy(payload);
|
||||
return testSuccessResponse(res, ctx);
|
||||
}),
|
||||
);
|
||||
};
|
||||
|
||||
// Mock URL.createObjectURL used by download logic
|
||||
const originalCreateObjectURL = URL.createObjectURL;
|
||||
const originalRevokeObjectURL = URL.revokeObjectURL;
|
||||
|
||||
beforeEach(() => {
|
||||
requestSpy = jest.fn();
|
||||
setupDefaultServer();
|
||||
(message.success as jest.Mock).mockReset();
|
||||
(message.error as jest.Mock).mockReset();
|
||||
// jsdom doesn't implement it by default
|
||||
((URL as unknown) as {
|
||||
createObjectURL: (b: Blob) => string;
|
||||
}).createObjectURL = jest.fn(() => 'blob:mock');
|
||||
((URL as unknown) as {
|
||||
revokeObjectURL: (u: string) => void;
|
||||
}).revokeObjectURL = jest.fn();
|
||||
});
|
||||
|
||||
beforeAll(() => {
|
||||
server.listen();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
server.resetHandlers();
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
server.close();
|
||||
// restore
|
||||
URL.createObjectURL = originalCreateObjectURL;
|
||||
URL.revokeObjectURL = originalRevokeObjectURL;
|
||||
});
|
||||
|
||||
it('renders download button', () => {
|
||||
const props = createTestProps();
|
||||
testRenderContent(props);
|
||||
|
||||
const button = screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON);
|
||||
expect(button).toBeInTheDocument();
|
||||
expect(button).toHaveClass('periscope-btn', 'ghost');
|
||||
});
|
||||
|
||||
it('shows popover with export options when download button is clicked', () => {
|
||||
const props = createTestProps();
|
||||
render(
|
||||
<LogsDownloadOptionsMenu
|
||||
startTime={props.startTime}
|
||||
endTime={props.endTime}
|
||||
filter={props.filter}
|
||||
columns={props.columns}
|
||||
orderBy={props.orderBy}
|
||||
/>,
|
||||
);
|
||||
|
||||
fireEvent.click(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON));
|
||||
|
||||
expect(screen.getByRole('dialog')).toBeInTheDocument();
|
||||
expect(screen.getByText('FORMAT')).toBeInTheDocument();
|
||||
expect(screen.getByText('Number of Rows')).toBeInTheDocument();
|
||||
expect(screen.getByText('Columns')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('allows changing export format', () => {
|
||||
const props = createTestProps();
|
||||
testRenderContent(props);
|
||||
fireEvent.click(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON));
|
||||
|
||||
const csvRadio = screen.getByRole('radio', { name: 'csv' });
|
||||
const jsonlRadio = screen.getByRole('radio', { name: 'jsonl' });
|
||||
|
||||
expect(csvRadio).toBeChecked();
|
||||
fireEvent.click(jsonlRadio);
|
||||
expect(jsonlRadio).toBeChecked();
|
||||
expect(csvRadio).not.toBeChecked();
|
||||
});
|
||||
|
||||
it('allows changing row limit', () => {
|
||||
const props = createTestProps();
|
||||
testRenderContent(props);
|
||||
|
||||
fireEvent.click(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON));
|
||||
|
||||
const tenKRadio = screen.getByRole('radio', { name: '10k' });
|
||||
const fiftyKRadio = screen.getByRole('radio', { name: '50k' });
|
||||
|
||||
expect(tenKRadio).toBeChecked();
|
||||
fireEvent.click(fiftyKRadio);
|
||||
expect(fiftyKRadio).toBeChecked();
|
||||
expect(tenKRadio).not.toBeChecked();
|
||||
});
|
||||
|
||||
it('allows changing columns scope', () => {
|
||||
const props = createTestProps();
|
||||
testRenderContent(props);
|
||||
fireEvent.click(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON));
|
||||
|
||||
const allColumnsRadio = screen.getByRole('radio', { name: 'All' });
|
||||
const selectedColumnsRadio = screen.getByRole('radio', { name: 'Selected' });
|
||||
|
||||
expect(allColumnsRadio).toBeChecked();
|
||||
fireEvent.click(selectedColumnsRadio);
|
||||
expect(selectedColumnsRadio).toBeChecked();
|
||||
expect(allColumnsRadio).not.toBeChecked();
|
||||
});
|
||||
|
||||
it('calls downloadExportData with correct parameters when export button is clicked (Selected columns)', async () => {
|
||||
const props = createTestProps();
|
||||
testRenderContent(props);
|
||||
fireEvent.click(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON));
|
||||
fireEvent.click(screen.getByRole('radio', { name: 'Selected' }));
|
||||
fireEvent.click(screen.getByText('Export'));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(requestSpy).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
start: props.startTime,
|
||||
end: props.endTime,
|
||||
columns: ['attribute.http.status:int64'],
|
||||
filter: props.filter,
|
||||
order_by: props.orderBy,
|
||||
format: DownloadFormats.CSV,
|
||||
limit: DownloadRowCounts.TEN_K,
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('calls downloadExportData with correct parameters when export button is clicked', async () => {
|
||||
const props = createTestProps();
|
||||
testRenderContent(props);
|
||||
|
||||
fireEvent.click(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON));
|
||||
fireEvent.click(screen.getByRole('radio', { name: 'All' }));
|
||||
fireEvent.click(screen.getByText('Export'));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(requestSpy).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
start: props.startTime,
|
||||
end: props.endTime,
|
||||
columns: [],
|
||||
filter: props.filter,
|
||||
order_by: props.orderBy,
|
||||
format: DownloadFormats.CSV,
|
||||
limit: DownloadRowCounts.TEN_K,
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('handles successful export with success message', async () => {
|
||||
const props = createTestProps();
|
||||
testRenderContent(props);
|
||||
|
||||
fireEvent.click(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON));
|
||||
fireEvent.click(screen.getByText('Export'));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(message.success).toHaveBeenCalledWith(
|
||||
'Export completed successfully',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('handles export failure with error message', async () => {
|
||||
// Override handler to return 500 for this test
|
||||
server.use(rest.get(EXPORT_URL, (_req, res, ctx) => res(ctx.status(500))));
|
||||
const props = createTestProps();
|
||||
testRenderContent(props);
|
||||
|
||||
fireEvent.click(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON));
|
||||
fireEvent.click(screen.getByText('Export'));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(message.error).toHaveBeenCalledWith(
|
||||
'Failed to export logs. Please try again.',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('handles UI state correctly during export process', async () => {
|
||||
server.use(
|
||||
rest.get(EXPORT_URL, (_req, res, ctx) => testSuccessResponse(res, ctx)),
|
||||
);
|
||||
const props = createTestProps();
|
||||
testRenderContent(props);
|
||||
|
||||
// Open popover
|
||||
fireEvent.click(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON));
|
||||
expect(screen.getByRole('dialog')).toBeInTheDocument();
|
||||
|
||||
// Start export
|
||||
fireEvent.click(screen.getByText('Export'));
|
||||
|
||||
// Check button is disabled during export
|
||||
expect(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON)).toBeDisabled();
|
||||
|
||||
// Check popover is closed immediately after export starts
|
||||
expect(screen.queryByRole('dialog')).not.toBeInTheDocument();
|
||||
|
||||
// Wait for export to complete and verify button is enabled again
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON)).not.toBeDisabled();
|
||||
});
|
||||
});
|
||||
|
||||
it('uses filename from Content-Disposition and triggers download click', async () => {
|
||||
server.use(
|
||||
rest.get(EXPORT_URL, (_req, res, ctx) =>
|
||||
res(
|
||||
ctx.status(200),
|
||||
ctx.set('Content-Type', 'application/octet-stream'),
|
||||
ctx.set('Content-Disposition', 'attachment; filename="report.jsonl"'),
|
||||
ctx.body('row\n'),
|
||||
),
|
||||
),
|
||||
);
|
||||
|
||||
const originalCreateElement = document.createElement.bind(document);
|
||||
const anchorEl = originalCreateElement('a') as HTMLAnchorElement;
|
||||
const setAttrSpy = jest.spyOn(anchorEl, 'setAttribute');
|
||||
const clickSpy = jest.spyOn(anchorEl, 'click');
|
||||
const removeSpy = jest.spyOn(anchorEl, 'remove');
|
||||
const createElSpy = jest
|
||||
.spyOn(document, 'createElement')
|
||||
.mockImplementation((tagName: any): any =>
|
||||
tagName === 'a' ? anchorEl : originalCreateElement(tagName),
|
||||
);
|
||||
const appendSpy = jest.spyOn(document.body, 'appendChild');
|
||||
|
||||
const props = createTestProps();
|
||||
testRenderContent(props);
|
||||
|
||||
fireEvent.click(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON));
|
||||
fireEvent.click(screen.getByText('Export'));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(appendSpy).toHaveBeenCalledWith(anchorEl);
|
||||
expect(setAttrSpy).toHaveBeenCalledWith('download', 'report.jsonl');
|
||||
expect(clickSpy).toHaveBeenCalled();
|
||||
expect(removeSpy).toHaveBeenCalled();
|
||||
});
|
||||
expect(anchorEl.getAttribute('download')).toBe('report.jsonl');
|
||||
|
||||
createElSpy.mockRestore();
|
||||
appendSpy.mockRestore();
|
||||
});
|
||||
});
|
||||
@@ -1,15 +1,170 @@
import DownloadOptionsMenu from 'components/DownloadOptionsMenu/DownloadOptionsMenu';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
import { useCallback, useMemo, useState } from 'react';
import { Button, message, Popover, Radio, Tooltip, Typography } from 'antd';
import { downloadExportData } from 'api/v1/download/downloadExportData';
import { Download, DownloadIcon, Loader2 } from 'lucide-react';
import { TelemetryFieldKey } from 'types/api/v5/queryRange';

import {
	DownloadColumnsScopes,
	DownloadFormats,
	DownloadRowCounts,
} from './constants';

import './LogsDownloadOptionsMenu.styles.scss';

function convertTelemetryFieldKeyToText(key: TelemetryFieldKey): string {
	const prefix = key.fieldContext ? `${key.fieldContext}.` : '';
	const suffix = key.fieldDataType ? `:${key.fieldDataType}` : '';
	return `${prefix}${key.name}${suffix}`;
}
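
For orientation while reading the diff: the helper above renders a column key as `<fieldContext>.<name>:<fieldDataType>`, dropping whichever parts are absent. A minimal illustrative sketch follows; the sample values mirror the 'attribute.http.status:int64' column asserted in the Selected-columns test earlier, and the variable names are only for illustration.

// Illustrative only: sample key matching the test fixture above.
const statusColumn = {
	name: 'http.status',
	fieldContext: 'attribute',
	fieldDataType: 'int64',
};

// convertTelemetryFieldKeyToText(statusColumn) -> 'attribute.http.status:int64'
// A key with no fieldContext and no fieldDataType renders as just key.name.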

interface LogsDownloadOptionsMenuProps {
	stagedQuery: Query | null;
	startTime: number;
	endTime: number;
	filter: string;
	columns: TelemetryFieldKey[];
	orderBy: string;
}

export default function LogsDownloadOptionsMenu({
	stagedQuery,
	startTime,
	endTime,
	filter,
	columns,
	orderBy,
}: LogsDownloadOptionsMenuProps): JSX.Element {
	const [exportFormat, setExportFormat] = useState<string>(DownloadFormats.CSV);
	const [rowLimit, setRowLimit] = useState<number>(DownloadRowCounts.TEN_K);
	const [columnsScope, setColumnsScope] = useState<string>(
		DownloadColumnsScopes.ALL,
	);
	const [isDownloading, setIsDownloading] = useState<boolean>(false);
	const [isPopoverOpen, setIsPopoverOpen] = useState<boolean>(false);

	const handleExportRawData = useCallback(async (): Promise<void> => {
		setIsPopoverOpen(false);
		try {
			setIsDownloading(true);
			const downloadOptions = {
				source: 'logs',
				start: startTime,
				end: endTime,
				columns:
					columnsScope === DownloadColumnsScopes.SELECTED
						? columns.map((col) => convertTelemetryFieldKeyToText(col))
						: [],
				filter,
				orderBy,
				format: exportFormat,
				limit: rowLimit,
			};

			await downloadExportData(downloadOptions);
			message.success('Export completed successfully');
		} catch (error) {
			console.error('Error exporting logs:', error);
			message.error('Failed to export logs. Please try again.');
		} finally {
			setIsDownloading(false);
		}
	}, [
		startTime,
		endTime,
		columnsScope,
		columns,
		filter,
		orderBy,
		exportFormat,
		rowLimit,
		setIsDownloading,
		setIsPopoverOpen,
	]);
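
The api client downloadExportData itself is not part of this diff. Going only by what the tests above assert (a blob object URL, the filename taken from Content-Disposition, and a temporary anchor element that is appended, clicked and removed), a hedged sketch of that kind of browser download helper could look like the code below; the function name, the url parameter and the fallback filename are assumptions, not the real client.

// Hypothetical sketch, not the actual api/v1/download/downloadExportData client.
async function triggerBrowserDownload(url: string, fallbackName: string): Promise<void> {
	const response = await fetch(url);
	if (!response.ok) {
		throw new Error(`Export failed with status ${response.status}`);
	}

	// Prefer the server-provided name, e.g. 'attachment; filename="report.jsonl"'.
	const disposition = response.headers.get('Content-Disposition') || '';
	const match = disposition.match(/filename="?([^";]+)"?/);
	const filename = match ? match[1] : fallbackName;

	// Materialise the payload as a blob URL and click a temporary anchor,
	// which is the behaviour the jsdom spies in the tests above observe.
	const blobUrl = URL.createObjectURL(await response.blob());
	const anchor = document.createElement('a');
	anchor.href = blobUrl;
	anchor.setAttribute('download', filename);
	document.body.appendChild(anchor);
	anchor.click();
	anchor.remove();
	URL.revokeObjectURL(blobUrl);
}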

	const popoverContent = useMemo(
		() => (
			<div
				className="export-options-container"
				role="dialog"
				aria-label="Export options"
				aria-modal="true"
			>
				<div className="export-format">
					<Typography.Text className="title">FORMAT</Typography.Text>
					<Radio.Group
						value={exportFormat}
						onChange={(e): void => setExportFormat(e.target.value)}
					>
						<Radio value={DownloadFormats.CSV}>csv</Radio>
						<Radio value={DownloadFormats.JSONL}>jsonl</Radio>
					</Radio.Group>
				</div>

				<div className="horizontal-line" />

				<div className="row-limit">
					<Typography.Text className="title">Number of Rows</Typography.Text>
					<Radio.Group
						value={rowLimit}
						onChange={(e): void => setRowLimit(e.target.value)}
					>
						<Radio value={DownloadRowCounts.TEN_K}>10k</Radio>
						<Radio value={DownloadRowCounts.THIRTY_K}>30k</Radio>
						<Radio value={DownloadRowCounts.FIFTY_K}>50k</Radio>
					</Radio.Group>
				</div>

				<div className="horizontal-line" />

				<div className="columns-scope">
					<Typography.Text className="title">Columns</Typography.Text>
					<Radio.Group
						value={columnsScope}
						onChange={(e): void => setColumnsScope(e.target.value)}
					>
						<Radio value={DownloadColumnsScopes.ALL}>All</Radio>
						<Radio value={DownloadColumnsScopes.SELECTED}>Selected</Radio>
					</Radio.Group>
				</div>

				<Button
					type="primary"
					icon={<Download size={16} />}
					onClick={handleExportRawData}
					className="export-button"
					disabled={isDownloading}
					loading={isDownloading}
				>
					Export
				</Button>
			</div>
		),
		[exportFormat, rowLimit, columnsScope, isDownloading, handleExportRawData],
	);

	return (
		<DownloadOptionsMenu stagedQuery={stagedQuery} dataSource={DataSource.LOGS} />
		<Popover
			content={popoverContent}
			trigger="click"
			placement="bottomRight"
			arrow={false}
			open={isPopoverOpen}
			onOpenChange={setIsPopoverOpen}
			rootClassName="logs-download-popover"
		>
			<Tooltip title="Download" placement="top">
				<Button
					className="periscope-btn ghost"
					icon={
						isDownloading ? (
							<Loader2 size={18} className="animate-spin" />
						) : (
							<DownloadIcon size={15} />
						)
					}
					data-testid="periscope-btn-download-options"
					disabled={isDownloading}
				/>
			</Tooltip>
		</Popover>
	);
}

@@ -1,18 +0,0 @@
import DownloadOptionsMenu from 'components/DownloadOptionsMenu/DownloadOptionsMenu';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';

interface TracesDownloadOptionsMenuProps {
	stagedQuery: Query | null;
}

export default function TracesDownloadOptionsMenu({
	stagedQuery,
}: TracesDownloadOptionsMenuProps): JSX.Element {
	return (
		<DownloadOptionsMenu
			stagedQuery={stagedQuery}
			dataSource={DataSource.TRACES}
		/>
	);
}

@@ -6,14 +6,12 @@ import { LOCALSTORAGE } from 'constants/localStorage';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { useOptionsMenu } from 'container/OptionsMenu';
import { ArrowUp10, Minus } from 'lucide-react';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource, StringOperators } from 'types/common/queryBuilder';

import QueryStatus from './QueryStatus';

function LogsActionsContainer({
	listQuery,
	stagedQuery,
	selectedPanelType,
	showFrequencyChart,
	handleToggleFrequencyChart,
@@ -23,9 +21,10 @@ function LogsActionsContainer({
	isLoading,
	isError,
	isSuccess,
	minTime,
	maxTime,
}: {
	listQuery: any;
	stagedQuery: Query | null;
	selectedPanelType: PANEL_TYPES;
	showFrequencyChart: boolean;
	handleToggleFrequencyChart: () => void;
@@ -35,6 +34,8 @@ function LogsActionsContainer({
	isLoading: boolean;
	isError: boolean;
	isSuccess: boolean;
	minTime: number;
	maxTime: number;
}): JSX.Element {
	const { options, config } = useOptionsMenu({
		storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
@@ -95,7 +96,13 @@ function LogsActionsContainer({
				/>
			</div>
			<div className="download-options-container">
				<LogsDownloadOptionsMenu stagedQuery={stagedQuery} />
				<LogsDownloadOptionsMenu
					startTime={minTime}
					endTime={maxTime}
					filter={listQuery?.filter?.expression || ''}
					columns={config.addColumn?.value || []}
					orderBy={orderBy}
				/>
			</div>
			<div className="format-options-container">
				<LogsFormatOptionsMenu

@@ -435,7 +435,6 @@ function LogsExplorerViewsContainer({
			{!showLiveLogs && (
				<LogsActionsContainer
					listQuery={listQuery}
					stagedQuery={stagedQuery}
					selectedPanelType={selectedPanelType}
					showFrequencyChart={showFrequencyChart}
					handleToggleFrequencyChart={handleToggleFrequencyChart}
@@ -445,6 +444,8 @@ function LogsExplorerViewsContainer({
					isLoading={isLoading}
					isError={isError}
					isSuccess={isSuccess}
					minTime={minTime}
					maxTime={maxTime}
				/>
			)}

@@ -168,7 +168,7 @@ describe('LogsExplorerViews -', () => {
		lodsQueryServerRequest();
		const { queryByTestId } = renderer();

		const periscopeDownloadButtonTestId = 'periscope-btn-download-logs';
		const periscopeDownloadButtonTestId = 'periscope-btn-download-options';
		const periscopeFormatButtonTestId = 'periscope-btn-format-options';

		// Test that the periscope button is present

@@ -13,8 +13,8 @@ import {
	usePatchRole,
} from 'api/generated/services/role';
import {
	AuthtypesPostableRoleDTO,
	RenderErrorResponseDTO,
	RoletypesPostableRoleDTO,
} from 'api/generated/services/sigNoz.schemas';
import { ErrorType } from 'api/generatedAPIInstance';
import ROUTES from 'constants/routes';
@@ -114,7 +114,7 @@ function CreateRoleModal({
				data: { description: values.description || '' },
			});
		} else {
			const data: RoletypesPostableRoleDTO = {
			const data: AuthtypesPostableRoleDTO = {
				name: values.name,
				...(values.description ? { description: values.description } : {}),
			};

@@ -2,7 +2,7 @@ import { useCallback, useEffect, useMemo } from 'react';
import { useHistory } from 'react-router-dom';
import { Pagination, Skeleton } from 'antd';
import { useListRoles } from 'api/generated/services/role';
import { RoletypesRoleDTO } from 'api/generated/services/sigNoz.schemas';
import { AuthtypesRoleDTO } from 'api/generated/services/sigNoz.schemas';
import ErrorInPlace from 'components/ErrorInPlace/ErrorInPlace';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import ROUTES from 'constants/routes';
@@ -20,7 +20,7 @@ const PAGE_SIZE = 20;

type DisplayItem =
	| { type: 'section'; label: string; count?: number }
	| { type: 'role'; role: RoletypesRoleDTO };
	| { type: 'role'; role: AuthtypesRoleDTO };

interface RolesListingTableProps {
	searchQuery: string;
@@ -187,7 +187,7 @@ function RolesListingTable({
	};

	// todo: use table from periscope when its available for consumption
	const renderRow = (role: RoletypesRoleDTO): JSX.Element => (
	const renderRow = (role: AuthtypesRoleDTO): JSX.Element => (
		<div
			key={role.id}
			className={`roles-table-row ${

@@ -1,7 +1,6 @@
.trace-explorer-controls {
	display: flex;
	justify-content: flex-end;
	align-items: center;
	gap: 8px;

	.order-by-container {

@@ -14,7 +14,6 @@ import logEvent from 'api/common/logEvent';
import ErrorInPlace from 'components/ErrorInPlace/ErrorInPlace';
import ListViewOrderBy from 'components/OrderBy/ListViewOrderBy';
import { ResizeTable } from 'components/ResizeTable';
import TracesDownloadOptionsMenu from 'components/TracesDownloadOptionsMenu/TracesDownloadOptionsMenu';
import { ENTITY_VERSION_V5 } from 'constants/app';
import { LOCALSTORAGE } from 'constants/localStorage';
import { QueryParams } from 'constants/query';
@@ -239,8 +238,6 @@ function ListView({
				/>
			</div>

			<TracesDownloadOptionsMenu stagedQuery={stagedQuery} />

			<TraceExplorerControls
				isLoading={isFetching}
				totalCount={totalCount}

@@ -1,8 +1,8 @@
import { RoletypesRoleDTO } from 'api/generated/services/sigNoz.schemas';
import { AuthtypesRoleDTO } from 'api/generated/services/sigNoz.schemas';

const orgId = '019ba2bb-2fa1-7b24-8159-cfca08617ef9';

export const managedRoles: RoletypesRoleDTO[] = [
export const managedRoles: AuthtypesRoleDTO[] = [
	{
		id: '019c24aa-2248-756f-9833-984f1ab63819',
		createdAt: new Date('2026-02-03T18:00:55.624356Z'),
@@ -35,7 +35,7 @@ export const managedRoles: RoletypesRoleDTO[] = [
	},
];

export const customRoles: RoletypesRoleDTO[] = [
export const customRoles: AuthtypesRoleDTO[] = [
	{
		id: '019c24aa-3333-0001-aaaa-111111111111',
		createdAt: new Date('2026-02-10T10:30:00.000Z'),
@@ -56,7 +56,7 @@ export const customRoles: RoletypesRoleDTO[] = [
	},
];

export const allRoles: RoletypesRoleDTO[] = [...managedRoles, ...customRoles];
export const allRoles: AuthtypesRoleDTO[] = [...managedRoles, ...customRoles];

export const listRolesSuccessResponse = {
	status: 'success',

@@ -1,6 +1,10 @@
import { QueryRangePayloadV5 } from 'types/api/v5/queryRange';

export interface ExportRawDataProps {
	source: string;
	format: string;
	body: QueryRangePayloadV5;
	start: number;
	end: number;
	columns: string[];
	filter: string;
	orderBy: string;
	limit: number;
}

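Read alongside the request spy in the tests above (which inspects start, end, filter, columns, order_by, limit and format on the export request), these props appear to be serialised into the endpoint's query string. A minimal sketch of that mapping, under the assumption that camelCase props become snake_case parameters and that columns is repeated per value; the helper name is hypothetical and the body payload is omitted.

// Hypothetical helper, not part of this diff.
function buildExportQueryParams(props: ExportRawDataProps): URLSearchParams {
	const params = new URLSearchParams({
		source: props.source,
		start: String(props.start),
		end: String(props.end),
		filter: props.filter,
		order_by: props.orderBy,
		limit: String(props.limit),
		format: props.format,
	});
	// Repeat 'columns' once per column so the server can read it with getAll('columns').
	props.columns.forEach((column) => params.append('columns', column));
	return params;
}
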
@@ -6,7 +6,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/http/handler"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/roletypes"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
@@ -16,7 +15,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
|
||||
Tags: []string{"role"},
|
||||
Summary: "Create role",
|
||||
Description: "This endpoint creates a role",
|
||||
Request: new(roletypes.PostableRole),
|
||||
Request: new(authtypes.PostableRole),
|
||||
RequestContentType: "",
|
||||
Response: new(types.Identifiable),
|
||||
ResponseContentType: "application/json",
|
||||
@@ -35,7 +34,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
|
||||
Description: "This endpoint lists all roles",
|
||||
Request: nil,
|
||||
RequestContentType: "",
|
||||
Response: make([]*roletypes.Role, 0),
|
||||
Response: make([]*authtypes.Role, 0),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{},
|
||||
@@ -52,7 +51,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
|
||||
Description: "This endpoint gets a role",
|
||||
Request: nil,
|
||||
RequestContentType: "",
|
||||
Response: new(roletypes.Role),
|
||||
Response: new(authtypes.Role),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{},
|
||||
@@ -84,7 +83,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
|
||||
Tags: []string{"role"},
|
||||
Summary: "Patch role",
|
||||
Description: "This endpoint patches a role",
|
||||
Request: new(roletypes.PatchableRole),
|
||||
Request: new(authtypes.PatchableRole),
|
||||
RequestContentType: "",
|
||||
Response: nil,
|
||||
ResponseContentType: "application/json",
|
||||
|
||||
@@ -6,7 +6,6 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/roletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
openfgav1 "github.com/openfga/api/proto/openfga/v1"
|
||||
)
|
||||
@@ -30,10 +29,10 @@ type AuthZ interface {
|
||||
ListObjects(context.Context, string, authtypes.Relation, authtypes.Typeable) ([]*authtypes.Object, error)
|
||||
|
||||
// Creates the role.
|
||||
Create(context.Context, valuer.UUID, *roletypes.Role) error
|
||||
Create(context.Context, valuer.UUID, *authtypes.Role) error
|
||||
|
||||
// Gets the role if it exists or creates one.
|
||||
GetOrCreate(context.Context, valuer.UUID, *roletypes.Role) (*roletypes.Role, error)
|
||||
GetOrCreate(context.Context, valuer.UUID, *authtypes.Role) (*authtypes.Role, error)
|
||||
|
||||
// Gets the objects associated with the given role and relation.
|
||||
GetObjects(context.Context, valuer.UUID, valuer.UUID, authtypes.Relation) ([]*authtypes.Object, error)
|
||||
@@ -42,7 +41,7 @@ type AuthZ interface {
|
||||
GetResources(context.Context) []*authtypes.Resource
|
||||
|
||||
// Patches the role.
|
||||
Patch(context.Context, valuer.UUID, *roletypes.Role) error
|
||||
Patch(context.Context, valuer.UUID, *authtypes.Role) error
|
||||
|
||||
// Patches the objects in authorization server associated with the given role and relation
|
||||
PatchObjects(context.Context, valuer.UUID, string, authtypes.Relation, []*authtypes.Object, []*authtypes.Object) error
|
||||
@@ -51,19 +50,19 @@ type AuthZ interface {
|
||||
Delete(context.Context, valuer.UUID, valuer.UUID) error
|
||||
|
||||
// Gets the role
|
||||
Get(context.Context, valuer.UUID, valuer.UUID) (*roletypes.Role, error)
|
||||
Get(context.Context, valuer.UUID, valuer.UUID) (*authtypes.Role, error)
|
||||
|
||||
// Gets the role by org_id and name
|
||||
GetByOrgIDAndName(context.Context, valuer.UUID, string) (*roletypes.Role, error)
|
||||
GetByOrgIDAndName(context.Context, valuer.UUID, string) (*authtypes.Role, error)
|
||||
|
||||
// Lists all the roles for the organization.
|
||||
List(context.Context, valuer.UUID) ([]*roletypes.Role, error)
|
||||
List(context.Context, valuer.UUID) ([]*authtypes.Role, error)
|
||||
|
||||
// Lists all the roles for the organization filtered by name
|
||||
ListByOrgIDAndNames(context.Context, valuer.UUID, []string) ([]*roletypes.Role, error)
|
||||
ListByOrgIDAndNames(context.Context, valuer.UUID, []string) ([]*authtypes.Role, error)
|
||||
|
||||
// Lists all the roles for the organization filtered by ids
|
||||
ListByOrgIDAndIDs(context.Context, valuer.UUID, []valuer.UUID) ([]*roletypes.Role, error)
|
||||
ListByOrgIDAndIDs(context.Context, valuer.UUID, []valuer.UUID) ([]*authtypes.Role, error)
|
||||
|
||||
// Grants a role to the subject based on role name.
|
||||
Grant(context.Context, valuer.UUID, []string, string) error
|
||||
@@ -75,7 +74,7 @@ type AuthZ interface {
|
||||
ModifyGrant(context.Context, valuer.UUID, []string, []string, string) error
|
||||
|
||||
// Bootstrap the managed roles.
|
||||
CreateManagedRoles(context.Context, valuer.UUID, []*roletypes.Role) error
|
||||
CreateManagedRoles(context.Context, valuer.UUID, []*authtypes.Role) error
|
||||
|
||||
// Bootstrap managed roles transactions and user assignments
|
||||
CreateManagedUserRoleTransactions(context.Context, valuer.UUID, valuer.UUID) error
|
||||
|
||||
@@ -5,7 +5,7 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types/roletypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
@@ -14,11 +14,11 @@ type store struct {
|
||||
sqlstore sqlstore.SQLStore
|
||||
}
|
||||
|
||||
func NewSqlAuthzStore(sqlstore sqlstore.SQLStore) roletypes.Store {
|
||||
func NewSqlAuthzStore(sqlstore sqlstore.SQLStore) authtypes.RoleStore {
|
||||
return &store{sqlstore: sqlstore}
|
||||
}
|
||||
|
||||
func (store *store) Create(ctx context.Context, role *roletypes.StorableRole) error {
|
||||
func (store *store) Create(ctx context.Context, role *authtypes.StorableRole) error {
|
||||
_, err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
@@ -32,8 +32,8 @@ func (store *store) Create(ctx context.Context, role *roletypes.StorableRole) er
|
||||
return nil
|
||||
}
|
||||
|
||||
func (store *store) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.StorableRole, error) {
|
||||
role := new(roletypes.StorableRole)
|
||||
func (store *store) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*authtypes.StorableRole, error) {
|
||||
role := new(authtypes.StorableRole)
|
||||
err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
@@ -43,14 +43,14 @@ func (store *store) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID)
|
||||
Where("id = ?", id).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
return nil, store.sqlstore.WrapNotFoundErrf(err, roletypes.ErrCodeRoleNotFound, "role with id: %s doesn't exist", id)
|
||||
return nil, store.sqlstore.WrapNotFoundErrf(err, authtypes.ErrCodeRoleNotFound, "role with id: %s doesn't exist", id)
|
||||
}
|
||||
|
||||
return role, nil
|
||||
}
|
||||
|
||||
func (store *store) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.StorableRole, error) {
|
||||
role := new(roletypes.StorableRole)
|
||||
func (store *store) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*authtypes.StorableRole, error) {
|
||||
role := new(authtypes.StorableRole)
|
||||
err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
@@ -60,14 +60,14 @@ func (store *store) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, na
|
||||
Where("name = ?", name).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
return nil, store.sqlstore.WrapNotFoundErrf(err, roletypes.ErrCodeRoleNotFound, "role with name: %s doesn't exist", name)
|
||||
return nil, store.sqlstore.WrapNotFoundErrf(err, authtypes.ErrCodeRoleNotFound, "role with name: %s doesn't exist", name)
|
||||
}
|
||||
|
||||
return role, nil
|
||||
}
|
||||
|
||||
func (store *store) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.StorableRole, error) {
|
||||
roles := make([]*roletypes.StorableRole, 0)
|
||||
func (store *store) List(ctx context.Context, orgID valuer.UUID) ([]*authtypes.StorableRole, error) {
|
||||
roles := make([]*authtypes.StorableRole, 0)
|
||||
err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
@@ -82,8 +82,8 @@ func (store *store) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.S
|
||||
return roles, nil
|
||||
}
|
||||
|
||||
func (store *store) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.StorableRole, error) {
|
||||
roles := make([]*roletypes.StorableRole, 0)
|
||||
func (store *store) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*authtypes.StorableRole, error) {
|
||||
roles := make([]*authtypes.StorableRole, 0)
|
||||
err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
@@ -99,7 +99,7 @@ func (store *store) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID,
|
||||
if len(roles) != len(names) {
|
||||
return nil, store.sqlstore.WrapNotFoundErrf(
|
||||
nil,
|
||||
roletypes.ErrCodeRoleNotFound,
|
||||
authtypes.ErrCodeRoleNotFound,
|
||||
"not all roles found for the provided names: %v", names,
|
||||
)
|
||||
}
|
||||
@@ -107,8 +107,8 @@ func (store *store) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID,
|
||||
return roles, nil
|
||||
}
|
||||
|
||||
func (store *store) ListByOrgIDAndIDs(ctx context.Context, orgID valuer.UUID, ids []valuer.UUID) ([]*roletypes.StorableRole, error) {
|
||||
roles := make([]*roletypes.StorableRole, 0)
|
||||
func (store *store) ListByOrgIDAndIDs(ctx context.Context, orgID valuer.UUID, ids []valuer.UUID) ([]*authtypes.StorableRole, error) {
|
||||
roles := make([]*authtypes.StorableRole, 0)
|
||||
err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
@@ -124,7 +124,7 @@ func (store *store) ListByOrgIDAndIDs(ctx context.Context, orgID valuer.UUID, id
|
||||
if len(roles) != len(ids) {
|
||||
return nil, store.sqlstore.WrapNotFoundErrf(
|
||||
nil,
|
||||
roletypes.ErrCodeRoleNotFound,
|
||||
authtypes.ErrCodeRoleNotFound,
|
||||
"not all roles found for the provided ids: %v", ids,
|
||||
)
|
||||
}
|
||||
@@ -132,7 +132,7 @@ func (store *store) ListByOrgIDAndIDs(ctx context.Context, orgID valuer.UUID, id
|
||||
return roles, nil
|
||||
}
|
||||
|
||||
func (store *store) Update(ctx context.Context, orgID valuer.UUID, role *roletypes.StorableRole) error {
|
||||
func (store *store) Update(ctx context.Context, orgID valuer.UUID, role *authtypes.StorableRole) error {
|
||||
_, err := store.
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
@@ -153,12 +153,12 @@ func (store *store) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUI
|
||||
sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewDelete().
|
||||
Model(new(roletypes.StorableRole)).
|
||||
Model(new(authtypes.StorableRole)).
|
||||
Where("org_id = ?", orgID).
|
||||
Where("id = ?", id).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return store.sqlstore.WrapNotFoundErrf(err, roletypes.ErrCodeRoleNotFound, "role with id %s doesn't exist", id)
|
||||
return store.sqlstore.WrapNotFoundErrf(err, authtypes.ErrCodeRoleNotFound, "role with id %s doesn't exist", id)
|
||||
}
|
||||
|
||||
return nil
|
||||
|
||||
@@ -8,7 +8,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/authz/openfgaserver"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/roletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
@@ -19,7 +18,7 @@ import (
|
||||
|
||||
type provider struct {
|
||||
server *openfgaserver.Server
|
||||
store roletypes.Store
|
||||
store authtypes.RoleStore
|
||||
}
|
||||
|
||||
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) factory.ProviderFactory[authz.AuthZ, authz.Config] {
|
||||
@@ -68,61 +67,61 @@ func (provider *provider) ListObjects(ctx context.Context, subject string, relat
|
||||
return provider.server.ListObjects(ctx, subject, relation, typeable)
|
||||
}
|
||||
|
||||
func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
|
||||
func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*authtypes.Role, error) {
|
||||
storableRole, err := provider.store.Get(ctx, orgID, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return roletypes.NewRoleFromStorableRole(storableRole), nil
|
||||
return authtypes.NewRoleFromStorableRole(storableRole), nil
|
||||
}
|
||||
|
||||
func (provider *provider) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
|
||||
func (provider *provider) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*authtypes.Role, error) {
|
||||
storableRole, err := provider.store.GetByOrgIDAndName(ctx, orgID, name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return roletypes.NewRoleFromStorableRole(storableRole), nil
|
||||
return authtypes.NewRoleFromStorableRole(storableRole), nil
|
||||
}
|
||||
|
||||
func (provider *provider) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
|
||||
func (provider *provider) List(ctx context.Context, orgID valuer.UUID) ([]*authtypes.Role, error) {
|
||||
storableRoles, err := provider.store.List(ctx, orgID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
roles := make([]*roletypes.Role, len(storableRoles))
|
||||
roles := make([]*authtypes.Role, len(storableRoles))
|
||||
for idx, storableRole := range storableRoles {
|
||||
roles[idx] = roletypes.NewRoleFromStorableRole(storableRole)
|
||||
roles[idx] = authtypes.NewRoleFromStorableRole(storableRole)
|
||||
}
|
||||
|
||||
return roles, nil
|
||||
}
|
||||
|
||||
func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
|
||||
func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*authtypes.Role, error) {
|
||||
storableRoles, err := provider.store.ListByOrgIDAndNames(ctx, orgID, names)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
roles := make([]*roletypes.Role, len(storableRoles))
|
||||
roles := make([]*authtypes.Role, len(storableRoles))
|
||||
for idx, storable := range storableRoles {
|
||||
roles[idx] = roletypes.NewRoleFromStorableRole(storable)
|
||||
roles[idx] = authtypes.NewRoleFromStorableRole(storable)
|
||||
}
|
||||
|
||||
return roles, nil
|
||||
}
|
||||
|
||||
func (provider *provider) ListByOrgIDAndIDs(ctx context.Context, orgID valuer.UUID, ids []valuer.UUID) ([]*roletypes.Role, error) {
|
||||
func (provider *provider) ListByOrgIDAndIDs(ctx context.Context, orgID valuer.UUID, ids []valuer.UUID) ([]*authtypes.Role, error) {
|
||||
storableRoles, err := provider.store.ListByOrgIDAndIDs(ctx, orgID, ids)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
roles := make([]*roletypes.Role, len(storableRoles))
|
||||
roles := make([]*authtypes.Role, len(storableRoles))
|
||||
for idx, storable := range storableRoles {
|
||||
roles[idx] = roletypes.NewRoleFromStorableRole(storable)
|
||||
roles[idx] = authtypes.NewRoleFromStorableRole(storable)
|
||||
}
|
||||
|
||||
return roles, nil
|
||||
@@ -179,10 +178,10 @@ func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, names [
|
||||
return provider.Write(ctx, nil, tuples)
|
||||
}
|
||||
|
||||
func (provider *provider) CreateManagedRoles(ctx context.Context, _ valuer.UUID, managedRoles []*roletypes.Role) error {
|
||||
func (provider *provider) CreateManagedRoles(ctx context.Context, _ valuer.UUID, managedRoles []*authtypes.Role) error {
|
||||
err := provider.store.RunInTx(ctx, func(ctx context.Context) error {
|
||||
for _, role := range managedRoles {
|
||||
err := provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
|
||||
err := provider.store.Create(ctx, authtypes.NewStorableRoleFromRole(role))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -199,15 +198,15 @@ func (provider *provider) CreateManagedRoles(ctx context.Context, _ valuer.UUID,
|
||||
}
|
||||
|
||||
func (provider *provider) CreateManagedUserRoleTransactions(ctx context.Context, orgID valuer.UUID, userID valuer.UUID) error {
|
||||
return provider.Grant(ctx, orgID, []string{roletypes.SigNozAdminRoleName}, authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil))
|
||||
return provider.Grant(ctx, orgID, []string{authtypes.SigNozAdminRoleName}, authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil))
|
||||
}
|
||||
|
||||
func (setter *provider) Create(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
|
||||
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
|
||||
func (setter *provider) Create(_ context.Context, _ valuer.UUID, _ *authtypes.Role) error {
|
||||
return errors.Newf(errors.TypeUnsupported, authtypes.ErrCodeRoleUnsupported, "not implemented")
|
||||
}
|
||||
|
||||
func (provider *provider) GetOrCreate(_ context.Context, _ valuer.UUID, _ *roletypes.Role) (*roletypes.Role, error) {
|
||||
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
|
||||
func (provider *provider) GetOrCreate(_ context.Context, _ valuer.UUID, _ *authtypes.Role) (*authtypes.Role, error) {
|
||||
return nil, errors.Newf(errors.TypeUnsupported, authtypes.ErrCodeRoleUnsupported, "not implemented")
|
||||
}
|
||||
|
||||
func (provider *provider) GetResources(_ context.Context) []*authtypes.Resource {
|
||||
@@ -215,19 +214,19 @@ func (provider *provider) GetResources(_ context.Context) []*authtypes.Resource
|
||||
}
|
||||
|
||||
func (provider *provider) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
|
||||
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
|
||||
return nil, errors.Newf(errors.TypeUnsupported, authtypes.ErrCodeRoleUnsupported, "not implemented")
|
||||
}
|
||||
|
||||
func (provider *provider) Patch(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
|
||||
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
|
||||
func (provider *provider) Patch(_ context.Context, _ valuer.UUID, _ *authtypes.Role) error {
|
||||
return errors.Newf(errors.TypeUnsupported, authtypes.ErrCodeRoleUnsupported, "not implemented")
|
||||
}
|
||||
|
||||
func (provider *provider) PatchObjects(_ context.Context, _ valuer.UUID, _ string, _ authtypes.Relation, _, _ []*authtypes.Object) error {
|
||||
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
|
||||
return errors.Newf(errors.TypeUnsupported, authtypes.ErrCodeRoleUnsupported, "not implemented")
|
||||
}
|
||||
|
||||
func (provider *provider) Delete(_ context.Context, _ valuer.UUID, _ valuer.UUID) error {
|
||||
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
|
||||
return errors.Newf(errors.TypeUnsupported, authtypes.ErrCodeRoleUnsupported, "not implemented")
|
||||
}
|
||||
|
||||
func (provider *provider) MustGetTypeables() []authtypes.Typeable {
|
||||
|
||||
@@ -9,7 +9,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/roletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
@@ -30,13 +29,13 @@ func (handler *handler) Create(rw http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
req := new(roletypes.PostableRole)
|
||||
req := new(authtypes.PostableRole)
|
||||
if err := binding.JSON.BindBody(r.Body, req); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
role := roletypes.NewRole(req.Name, req.Description, roletypes.RoleTypeCustom, valuer.MustNewUUID(claims.OrgID))
|
||||
role := authtypes.NewRole(req.Name, req.Description, authtypes.RoleTypeCustom, valuer.MustNewUUID(claims.OrgID))
|
||||
err = handler.authz.Create(ctx, valuer.MustNewUUID(claims.OrgID), role)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
@@ -56,7 +55,7 @@ func (handler *handler) Get(rw http.ResponseWriter, r *http.Request) {
|
||||
|
||||
id, ok := mux.Vars(r)["id"]
|
||||
if !ok {
|
||||
render.Error(rw, errors.New(errors.TypeInvalidInput, roletypes.ErrCodeRoleInvalidInput, "id is missing from the request"))
|
||||
render.Error(rw, errors.New(errors.TypeInvalidInput, authtypes.ErrCodeRoleInvalidInput, "id is missing from the request"))
|
||||
return
|
||||
}
|
||||
roleID, err := valuer.NewUUID(id)
|
||||
@@ -84,7 +83,7 @@ func (handler *handler) GetObjects(rw http.ResponseWriter, r *http.Request) {
|
||||
|
||||
id, ok := mux.Vars(r)["id"]
|
||||
if !ok {
|
||||
render.Error(rw, errors.New(errors.TypeInvalidInput, roletypes.ErrCodeRoleInvalidInput, "id is missing from the request"))
|
||||
render.Error(rw, errors.New(errors.TypeInvalidInput, authtypes.ErrCodeRoleInvalidInput, "id is missing from the request"))
|
||||
return
|
||||
}
|
||||
roleID, err := valuer.NewUUID(id)
|
||||
@@ -95,7 +94,7 @@ func (handler *handler) GetObjects(rw http.ResponseWriter, r *http.Request) {
|
||||
|
||||
relationStr, ok := mux.Vars(r)["relation"]
|
||||
if !ok {
|
||||
render.Error(rw, errors.New(errors.TypeInvalidInput, roletypes.ErrCodeRoleInvalidInput, "relation is missing from the request"))
|
||||
render.Error(rw, errors.New(errors.TypeInvalidInput, authtypes.ErrCodeRoleInvalidInput, "relation is missing from the request"))
|
||||
return
|
||||
}
|
||||
relation, err := authtypes.NewRelation(relationStr)
|
||||
@@ -150,7 +149,7 @@ func (handler *handler) Patch(rw http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
req := new(roletypes.PatchableRole)
|
||||
req := new(authtypes.PatchableRole)
|
||||
if err := binding.JSON.BindBody(r.Body, req); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
|
||||
@@ -10,7 +10,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/roletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
@@ -43,7 +42,7 @@ func (middleware *AuthZ) ViewAccess(next http.HandlerFunc) http.HandlerFunc {
|
||||
}
|
||||
|
||||
commentCtx := ctxtypes.CommentFromContext(ctx)
|
||||
authtype, ok := commentCtx.Map()["auth_type"]
|
||||
authtype, ok := commentCtx.Map()["identn_provider"]
|
||||
if ok && (authtype == authtypes.IdentNProviderAPIkey.StringValue()) {
|
||||
if err := claims.IsViewer(); err != nil {
|
||||
middleware.logger.WarnContext(ctx, authzDeniedMessage, "claims", claims)
|
||||
@@ -56,9 +55,9 @@ func (middleware *AuthZ) ViewAccess(next http.HandlerFunc) http.HandlerFunc {
|
||||
}
|
||||
|
||||
selectors := []authtypes.Selector{
|
||||
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAdminRoleName),
|
||||
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozEditorRoleName),
|
||||
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozViewerRoleName),
|
||||
authtypes.MustNewSelector(authtypes.TypeRole, authtypes.SigNozAdminRoleName),
|
||||
authtypes.MustNewSelector(authtypes.TypeRole, authtypes.SigNozEditorRoleName),
|
||||
authtypes.MustNewSelector(authtypes.TypeRole, authtypes.SigNozViewerRoleName),
|
||||
}
|
||||
|
||||
err = middleware.authzService.CheckWithTupleCreation(
|
||||
@@ -95,7 +94,7 @@ func (middleware *AuthZ) EditAccess(next http.HandlerFunc) http.HandlerFunc {
|
||||
}
|
||||
|
||||
commentCtx := ctxtypes.CommentFromContext(ctx)
|
||||
authtype, ok := commentCtx.Map()["auth_type"]
|
||||
authtype, ok := commentCtx.Map()["identn_provider"]
|
||||
if ok && (authtype == authtypes.IdentNProviderAPIkey.StringValue()) {
|
||||
if err := claims.IsEditor(); err != nil {
|
||||
middleware.logger.WarnContext(ctx, authzDeniedMessage, "claims", claims)
|
||||
@@ -108,8 +107,8 @@ func (middleware *AuthZ) EditAccess(next http.HandlerFunc) http.HandlerFunc {
|
||||
}
|
||||
|
||||
selectors := []authtypes.Selector{
|
||||
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAdminRoleName),
|
||||
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozEditorRoleName),
|
||||
authtypes.MustNewSelector(authtypes.TypeRole, authtypes.SigNozAdminRoleName),
|
||||
authtypes.MustNewSelector(authtypes.TypeRole, authtypes.SigNozEditorRoleName),
|
||||
}
|
||||
|
||||
err = middleware.authzService.CheckWithTupleCreation(
|
||||
@@ -146,7 +145,7 @@ func (middleware *AuthZ) AdminAccess(next http.HandlerFunc) http.HandlerFunc {
|
||||
}
|
||||
|
||||
commentCtx := ctxtypes.CommentFromContext(ctx)
|
||||
authtype, ok := commentCtx.Map()["auth_type"]
|
||||
authtype, ok := commentCtx.Map()["identn_provider"]
|
||||
if ok && (authtype == authtypes.IdentNProviderAPIkey.StringValue()) {
|
||||
if err := claims.IsAdmin(); err != nil {
|
||||
middleware.logger.WarnContext(ctx, authzDeniedMessage, "claims", claims)
|
||||
@@ -159,7 +158,7 @@ func (middleware *AuthZ) AdminAccess(next http.HandlerFunc) http.HandlerFunc {
|
||||
}
|
||||
|
||||
selectors := []authtypes.Selector{
|
||||
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAdminRoleName),
|
||||
authtypes.MustNewSelector(authtypes.TypeRole, authtypes.SigNozAdminRoleName),
|
||||
}
|
||||
|
||||
err = middleware.authzService.CheckWithTupleCreation(
|
||||
|
||||
@@ -101,13 +101,8 @@ func (provider *provider) GetIdentity(req *http.Request) (*authtypes.Identity, e
		return nil, err
	}

	identity := authtypes.Identity{
		UserID: user.ID,
		Role: apiKey.Role,
		Email: user.Email,
		OrgID: user.OrgID,
	}
	return &identity, nil
	identity := authtypes.NewIdentity(user.ID, user.OrgID, user.Email, apiKey.Role, provider.Name())
	return identity, nil
}

func (provider *provider) Post(ctx context.Context, _ *http.Request, _ authtypes.Claims) {

65
pkg/modules/cloudintegration/cloudintegration.go
Normal file
@@ -0,0 +1,65 @@
|
||||
package cloudintegration
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
|
||||
citypes "github.com/SigNoz/signoz/pkg/types/cloudintegrationtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type Module interface {
|
||||
CreateAccount(ctx context.Context, account *citypes.Account) error
|
||||
|
||||
// GetAccount returns cloud integration account
|
||||
GetAccount(ctx context.Context, orgID, accountID valuer.UUID) (*citypes.Account, error)
|
||||
|
||||
// ListAccounts lists accounts where agent is connected
|
||||
ListAccounts(ctx context.Context, orgID valuer.UUID) ([]*citypes.Account, error)
|
||||
|
||||
// UpdateAccount updates the cloud integration account for a specific organization.
|
||||
UpdateAccount(ctx context.Context, account *citypes.Account) error
|
||||
|
||||
// DisconnectAccount soft deletes/removes a cloud integration account.
|
||||
DisconnectAccount(ctx context.Context, orgID, accountID valuer.UUID) error
|
||||
|
||||
// GetConnectionArtifact returns cloud provider specific connection information,
|
||||
// client side handles how this information is shown
|
||||
GetConnectionArtifact(ctx context.Context, account *citypes.Account, req *citypes.ConnectionArtifactRequest) (*citypes.ConnectionArtifact, error)
|
||||
|
||||
// ListServicesMetadata returns the list of services metadata for a cloud provider attached with the integrationID.
|
||||
// This just returns a summary of the service and not the whole service definition
|
||||
ListServicesMetadata(ctx context.Context, orgID valuer.UUID, integrationID *valuer.UUID) ([]*citypes.ServiceMetadata, error)
|
||||
|
||||
// GetService returns service definition details for a serviceID. This returns config and
|
||||
// other details required to show in service details page on web client.
|
||||
GetService(ctx context.Context, orgID valuer.UUID, integrationID *valuer.UUID, serviceID string) (*citypes.Service, error)
|
||||
|
||||
// UpdateService updates cloud integration service
|
||||
UpdateService(ctx context.Context, orgID valuer.UUID, service *citypes.CloudIntegrationService) error
|
||||
|
||||
// AgentCheckIn is called by agent to heartbeat and get latest config in response.
|
||||
AgentCheckIn(ctx context.Context, orgID valuer.UUID, req *citypes.AgentCheckInRequest) (*citypes.AgentCheckInResponse, error)
|
||||
|
||||
// GetDashboardByID returns dashboard JSON for a given dashboard id.
|
||||
// this only returns the dashboard when the service (embedded in dashboard id) is enabled
|
||||
// in the org for any cloud integration account
|
||||
GetDashboardByID(ctx context.Context, orgID valuer.UUID, id string) (*dashboardtypes.Dashboard, error)
|
||||
|
||||
// ListDashboards returns list of dashboards across all connected cloud integration accounts
|
||||
// for enabled services in the org. This list gets added to dashboard list page
|
||||
ListDashboards(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error)
|
||||
}
|
||||
|
||||
type Handler interface {
|
||||
GetConnectionArtifact(http.ResponseWriter, *http.Request)
|
||||
ListAccounts(http.ResponseWriter, *http.Request)
|
||||
GetAccount(http.ResponseWriter, *http.Request)
|
||||
UpdateAccount(http.ResponseWriter, *http.Request)
|
||||
DisconnectAccount(http.ResponseWriter, *http.Request)
|
||||
ListServicesMetadata(http.ResponseWriter, *http.Request)
|
||||
GetService(http.ResponseWriter, *http.Request)
|
||||
UpdateService(http.ResponseWriter, *http.Request)
|
||||
AgentCheckIn(http.ResponseWriter, *http.Request)
|
||||
}
|
||||
@@ -27,7 +27,12 @@ type OrgConfig struct {
}

type PasswordConfig struct {
	Reset ResetConfig `mapstructure:"reset"`
	Invite InviteConfig `mapstructure:"invite"`
	Reset ResetConfig `mapstructure:"reset"`
}

type InviteConfig struct {
	MaxTokenLifetime time.Duration `mapstructure:"max_token_lifetime"`
}

type ResetConfig struct {
@@ -46,6 +51,9 @@ func newConfig() factory.Config {
			AllowSelf: false,
			MaxTokenLifetime: 6 * time.Hour,
		},
		Invite: InviteConfig{
			MaxTokenLifetime: 48 * time.Hour,
		},
	},
	Root: RootConfig{
		Enabled: false,
@@ -61,6 +69,10 @@ func (c Config) Validate() error {
		return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::password::reset::max_token_lifetime must be positive")
	}

	if c.Password.Invite.MaxTokenLifetime <= 0 {
		return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::password::invite::max_token_lifetime must be positive")
	}

	if c.Root.Enabled {
		if c.Root.Email.IsZero() {
			return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::root::email is required when root user is enabled")

@@ -19,7 +19,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/emailtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/integrationtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/roletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/dustin/go-humanize"
|
||||
)
|
||||
@@ -204,7 +203,7 @@ func (m *Module) CreateBulkInvite(ctx context.Context, orgID valuer.UUID, userID
|
||||
|
||||
resetLink := userWithToken.ResetPasswordToken.FactorPasswordResetLink(frontendBaseUrl)
|
||||
|
||||
tokenLifetime := m.config.Password.Reset.MaxTokenLifetime
|
||||
tokenLifetime := m.config.Password.Invite.MaxTokenLifetime
|
||||
humanizedTokenLifetime := strings.TrimSpace(humanize.RelTime(time.Now(), time.Now().Add(tokenLifetime), "", ""))
|
||||
|
||||
if err := m.emailing.SendHTML(ctx, userWithToken.User.Email.String(), "You're Invited to Join SigNoz", emailtypes.TemplateNameInvitationEmail, map[string]any{
|
||||
@@ -263,7 +262,7 @@ func (module *Module) CreateUser(ctx context.Context, input *types.User, opts ..
|
||||
createUserOpts := root.NewCreateUserOptions(opts...)
|
||||
|
||||
// since assign is idempotant multiple calls to assign won't cause issues in case of retries.
|
||||
err := module.authz.Grant(ctx, input.OrgID, []string{roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role)}, authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
|
||||
err := module.authz.Grant(ctx, input.OrgID, []string{authtypes.MustGetSigNozManagedRoleFromExistingRole(input.Role)}, authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -333,8 +332,8 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
|
||||
if user.Role != "" && user.Role != existingUser.Role {
|
||||
err = m.authz.ModifyGrant(ctx,
|
||||
orgID,
|
||||
		[]string{roletypes.MustGetSigNozManagedRoleFromExistingRole(existingUser.Role)},
		[]string{roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role)},
		[]string{authtypes.MustGetSigNozManagedRoleFromExistingRole(existingUser.Role)},
		[]string{authtypes.MustGetSigNozManagedRoleFromExistingRole(user.Role)},
		authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil),
	)
	if err != nil {
@@ -395,7 +394,7 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
	}

	// since revoke is idempotent, multiple calls to revoke won't cause issues in case of retries
	err = module.authz.Revoke(ctx, orgID, []string{roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role)}, authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
	err = module.authz.Revoke(ctx, orgID, []string{authtypes.MustGetSigNozManagedRoleFromExistingRole(user.Role)}, authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
	if err != nil {
		return err
	}
@@ -461,7 +460,11 @@ func (module *Module) GetOrCreateResetPasswordToken(ctx context.Context, userID
	}

	// create a new token
	resetPasswordToken, err := types.NewResetPasswordToken(password.ID, time.Now().Add(module.config.Password.Reset.MaxTokenLifetime))
	tokenLifetime := module.config.Password.Reset.MaxTokenLifetime
	if user.Status == types.UserStatusPendingInvite {
		tokenLifetime = module.config.Password.Invite.MaxTokenLifetime
	}
	resetPasswordToken, err := types.NewResetPasswordToken(password.ID, time.Now().Add(tokenLifetime))
	if err != nil {
		return nil, err
	}
@@ -501,6 +504,9 @@ func (module *Module) ForgotPassword(ctx context.Context, orgID valuer.UUID, ema
	resetLink := token.FactorPasswordResetLink(frontendBaseURL)

	tokenLifetime := module.config.Password.Reset.MaxTokenLifetime
	if user.Status == types.UserStatusPendingInvite {
		tokenLifetime = module.config.Password.Invite.MaxTokenLifetime
	}
	humanizedTokenLifetime := strings.TrimSpace(humanize.RelTime(time.Now(), time.Now().Add(tokenLifetime), "", ""))

	if err := module.emailing.SendHTML(
@@ -558,7 +564,7 @@ func (module *Module) UpdatePasswordByResetPasswordToken(ctx context.Context, to
	if err = module.authz.Grant(
		ctx,
		user.OrgID,
		[]string{roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role)},
		[]string{authtypes.MustGetSigNozManagedRoleFromExistingRole(user.Role)},
		authtypes.MustNewSubject(authtypes.TypeableUser, user.ID.StringValue(), user.OrgID, nil),
	); err != nil {
		return err
@@ -692,7 +698,7 @@ func (module *Module) CreateFirstUser(ctx context.Context, organization *types.O
		return nil, err
	}

	managedRoles := roletypes.NewManagedRoles(organization.ID)
	managedRoles := authtypes.NewManagedRoles(organization.ID)
	err = module.authz.CreateManagedUserRoleTransactions(ctx, organization.ID, user.ID)
	if err != nil {
		return nil, err
@@ -793,7 +799,7 @@ func (module *Module) activatePendingUser(ctx context.Context, user *types.User)
	err := module.authz.Grant(
		ctx,
		user.OrgID,
		[]string{roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role)},
		[]string{authtypes.MustGetSigNozManagedRoleFromExistingRole(user.Role)},
		authtypes.MustNewSubject(authtypes.TypeableUser, user.ID.StringValue(), user.OrgID, nil),
	)
	if err != nil {
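
The reset-token hunks above switch the lifetime to the invite configuration when the target user is still pending an invite, and the forgot-password email reuses that lifetime in humanized form. A minimal, self-contained sketch of that selection and formatting; the "pending_invite" string and the durations here are illustrative stand-ins for the real types and config values:

package main

import (
	"fmt"
	"strings"
	"time"

	"github.com/dustin/go-humanize"
)

// pickTokenLifetime mirrors the selection in the diff: users still pending an invite
// get the invite lifetime, everyone else gets the password-reset lifetime.
func pickTokenLifetime(status string, resetLifetime, inviteLifetime time.Duration) time.Duration {
	// "pending_invite" is a hypothetical stand-in for types.UserStatusPendingInvite.
	if status == "pending_invite" {
		return inviteLifetime
	}
	return resetLifetime
}

func main() {
	lifetime := pickTokenLifetime("pending_invite", 6*time.Hour, 72*time.Hour)
	// RelTime renders the gap between the two instants as a phrase such as "3 days".
	humanized := strings.TrimSpace(humanize.RelTime(time.Now(), time.Now().Add(lifetime), "", ""))
	fmt.Printf("token valid for %s (%s)\n", lifetime, humanized)
}
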
@@ -11,7 +11,6 @@ import (
	"github.com/SigNoz/signoz/pkg/modules/user"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/types/roletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

@@ -159,8 +158,8 @@ func (s *service) createOrPromoteRootUser(ctx context.Context, orgID valuer.UUID
	if oldRole != types.RoleAdmin {
		if err := s.authz.ModifyGrant(ctx,
			orgID,
			[]string{roletypes.MustGetSigNozManagedRoleFromExistingRole(oldRole)},
			[]string{roletypes.MustGetSigNozManagedRoleFromExistingRole(types.RoleAdmin)},
			[]string{authtypes.MustGetSigNozManagedRoleFromExistingRole(oldRole)},
			[]string{authtypes.MustGetSigNozManagedRoleFromExistingRole(types.RoleAdmin)},
			authtypes.MustNewSubject(authtypes.TypeableUser, existingUser.ID.StringValue(), orgID, nil),
		); err != nil {
			return err
@@ -20,6 +20,7 @@ import (
	"github.com/SigNoz/signoz/pkg/types/instrumentationtypes"
	"github.com/SigNoz/signoz/pkg/types/metrictypes"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/dustin/go-humanize"
	"golang.org/x/exp/maps"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
@@ -158,7 +159,8 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
	metricNames := make([]string, 0)
	for idx, query := range req.CompositeQuery.Queries {
		event.QueryType = query.Type.StringValue()
		if query.Type == qbtypes.QueryTypeBuilder {
		switch query.Type {
		case qbtypes.QueryTypeBuilder:
			if spec, ok := query.Spec.(qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]); ok {
				for _, agg := range spec.Aggregations {
					if agg.MetricName != "" {
@@ -236,7 +238,7 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
				}
				req.CompositeQuery.Queries[idx].Spec = spec
			}
		} else if query.Type == qbtypes.QueryTypePromQL {
		case qbtypes.QueryTypePromQL:
			event.MetricsUsed = true
			switch spec := query.Spec.(type) {
			case qbtypes.PromQuery:
@@ -247,7 +249,7 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
			}
			req.CompositeQuery.Queries[idx].Spec = spec
			}
		} else if query.Type == qbtypes.QueryTypeClickHouseSQL {
		case qbtypes.QueryTypeClickHouseSQL:
			switch spec := query.Spec.(type) {
			case qbtypes.ClickHouseQuery:
				if strings.TrimSpace(spec.Query) != "" {
@@ -256,7 +258,7 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
					event.TracesUsed = strings.Contains(spec.Query, "signoz_traces")
				}
			}
		} else if query.Type == qbtypes.QueryTypeTraceOperator {
		case qbtypes.QueryTypeTraceOperator:
			if spec, ok := query.Spec.(qbtypes.QueryBuilderTraceOperator); ok {
				if spec.StepInterval.Seconds() == 0 {
					spec.StepInterval = qbtypes.Step{
@@ -276,23 +278,9 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
		}
	}

	// Fetch temporality for all metrics at once
	var metricTemporality map[string]metrictypes.Temporality
	var metricTypes map[string]metrictypes.Type
	if len(metricNames) > 0 {
		var err error
		metricTemporality, metricTypes, err = q.metadataStore.FetchTemporalityAndTypeMulti(ctx, req.Start, req.End, metricNames...)
		if err != nil {
			q.logger.WarnContext(ctx, "failed to fetch metric temporality", "error", err, "metrics", metricNames)
			// Continue without temporality - statement builder will handle unspecified
			metricTemporality = make(map[string]metrictypes.Temporality)
			metricTypes = make(map[string]metrictypes.Type)
		}
		q.logger.DebugContext(ctx, "fetched metric temporalities and types", "metric_temporality", metricTemporality, "metric_types", metricTypes)
	}

	queries := make(map[string]qbtypes.Query)
	steps := make(map[string]qbtypes.Step)
	missingMetrics := []string{}

	for _, query := range req.CompositeQuery.Queries {
		var queryName string
@@ -374,15 +362,26 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
			queries[spec.Name] = bq
			steps[spec.Name] = spec.StepInterval
		case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
			var metricTemporality map[string]metrictypes.Temporality
			var metricTypes map[string]metrictypes.Type
			if len(metricNames) > 0 {
				var err error
				metricTemporality, metricTypes, err = q.metadataStore.FetchTemporalityAndTypeMulti(ctx, req.Start, req.End, metricNames...)
				if err != nil {
					q.logger.WarnContext(ctx, "failed to fetch metric temporality", "error", err, "metrics", metricNames)
					return nil, errors.NewInternalf(errors.CodeInternal, "failed to fetch metrics temporality")
				}
				q.logger.DebugContext(ctx, "fetched metric temporalities and types", "metric_temporality", metricTemporality, "metric_types", metricTypes)
			}
			for i := range spec.Aggregations {
				if spec.Aggregations[i].MetricName != "" && spec.Aggregations[i].Temporality == metrictypes.Unknown {
					if temp, ok := metricTemporality[spec.Aggregations[i].MetricName]; ok && temp != metrictypes.Unknown {
						spec.Aggregations[i].Temporality = temp
					}
				}
				// TODO(srikanthccv): warn when the metric is missing
				if spec.Aggregations[i].Temporality == metrictypes.Unknown {
					spec.Aggregations[i].Temporality = metrictypes.Unspecified
					missingMetrics = append(missingMetrics, spec.Aggregations[i].MetricName)
					continue
				}

				if spec.Aggregations[i].MetricName != "" && spec.Aggregations[i].Type == metrictypes.UnspecifiedType {
@@ -409,6 +408,24 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
			}
		}
	}
	if len(missingMetrics) > 0 {
		lastSeenInfo, _ := q.metadataStore.FetchLastSeenInfoMulti(ctx, missingMetrics...)
		lastSeenStr := func(name string) string {
			if ts, ok := lastSeenInfo[name]; ok && ts > 0 {
				ago := humanize.RelTime(time.UnixMilli(ts), time.Now(), "ago", "from now")
				return fmt.Sprintf("%s (last seen %s)", name, ago)
			}
			return name
		}
		if len(missingMetrics) == 1 {
			return nil, errors.NewNotFoundf(errors.CodeNotFound, "no data found for the metric %s in the query time range", lastSeenStr(missingMetrics[0]))
		}
		parts := make([]string, len(missingMetrics))
		for i, m := range missingMetrics {
			parts[i] = lastSeenStr(m)
		}
		return nil, errors.NewNotFoundf(errors.CodeNotFound, "no data found for the following metrics in the query time range: %s", strings.Join(parts, ", "))
	}
	qbResp, qbErr := q.run(ctx, orgID, queries, req, steps, event)
	if qbResp != nil {
		qbResp.QBEvent = event
@@ -663,7 +680,7 @@ func (q *querier) run(
}

// executeWithCache executes a query using the bucket cache
func (q *querier) executeWithCache(ctx context.Context, orgID valuer.UUID, query qbtypes.Query, step qbtypes.Step, noCache bool) (*qbtypes.Result, error) {
func (q *querier) executeWithCache(ctx context.Context, orgID valuer.UUID, query qbtypes.Query, step qbtypes.Step, _ bool) (*qbtypes.Result, error) {
	// Get cached data and missing ranges
	cachedResult, missingRanges := q.bucketCache.GetMissRanges(ctx, orgID, query, step)
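
The missing-metrics branch above decorates each unknown metric with a humanized last-seen timestamp before wrapping the list into a NotFound error. A small runnable sketch of the same formatting; the metric names and timestamps below are made up for illustration:

package main

import (
	"fmt"
	"strings"
	"time"

	"github.com/dustin/go-humanize"
)

// lastSeenStr appends a "(last seen ...)" suffix when a last-seen timestamp
// (in Unix milliseconds) is known for the metric, mirroring the closure in the diff.
func lastSeenStr(name string, lastSeen map[string]int64) string {
	if ts, ok := lastSeen[name]; ok && ts > 0 {
		ago := humanize.RelTime(time.UnixMilli(ts), time.Now(), "ago", "from now")
		return fmt.Sprintf("%s (last seen %s)", name, ago)
	}
	return name
}

func main() {
	// Hypothetical last-seen data; in the diff it comes from FetchLastSeenInfoMulti.
	lastSeen := map[string]int64{"http_requests_total": time.Now().Add(-36 * time.Hour).UnixMilli()}
	missing := []string{"http_requests_total", "queue_depth"}

	parts := make([]string, len(missing))
	for i, m := range missing {
		parts[i] = lastSeenStr(m, lastSeen)
	}
	fmt.Printf("no data found for the following metrics in the query time range: %s\n", strings.Join(parts, ", "))
}
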
@@ -115,7 +115,6 @@ func (r *Repo) GetLatestVersion(
func (r *Repo) insertConfig(
	ctx context.Context, orgId valuer.UUID, userId valuer.UUID, c *opamptypes.AgentConfigVersion, elements []string,
) error {

	if c.ElementType.StringValue() == "" {
		return errors.NewInvalidInputf(CodeElementTypeRequired, "element type is required for creating agent config version")
	}
@@ -229,6 +228,25 @@ func (r *Repo) updateDeployStatus(ctx context.Context,
	return nil
}

// GetDeployStatusByHash returns the DeployStatus for the given config hash
// (stored with orgId prefix). Returns DeployStatusUnknown when no matching row exists.
func (r *Repo) GetDeployStatusByHash(ctx context.Context, orgId valuer.UUID, configHash string) (opamptypes.DeployStatus, error) {
	var version opamptypes.AgentConfigVersion
	err := r.store.BunDB().NewSelect().
		Model(&version).
		ColumnExpr("deploy_status").
		Where("hash = ?", configHash).
		Where("org_id = ?", orgId).
		Scan(ctx)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return opamptypes.DeployStatusUnknown, nil
		}
		return opamptypes.DeployStatusUnknown, errors.WrapInternalf(err, errors.CodeInternal, "failed to query deploy status by hash")
	}
	return version.DeployStatus, nil
}

func (r *Repo) updateDeployStatusByHash(
	ctx context.Context, orgId valuer.UUID, confighash string, status string, result string,
) error {
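
GetDeployStatusByHash looks the row up by the hash exactly as stored, which carries an orgId prefix; the agent's first-connect handler later in this compare therefore rebuilds the key as orgID + rawHash before calling it. A hedged, self-contained sketch of that lookup pattern, using simplified stand-in types instead of the real opamptypes and valuer types:

package main

import (
	"context"
	"fmt"
)

// DeployStatus stands in for opamptypes.DeployStatus in this sketch.
type DeployStatus string

const DeployStatusUnknown DeployStatus = "unknown"

// statusByHashGetter is the narrow capability added to the config provider in this diff.
type statusByHashGetter interface {
	GetDeployStatusByHash(ctx context.Context, orgID string, configHash string) (DeployStatus, error)
}

// reconcileDeployStatus rebuilds the orgID-prefixed key the repository stores and
// asks the provider for the persisted status of the hash an agent reported.
func reconcileDeployStatus(ctx context.Context, p statusByHashGetter, orgID, rawHash string) (DeployStatus, error) {
	status, err := p.GetDeployStatusByHash(ctx, orgID, orgID+rawHash)
	if err != nil {
		return DeployStatusUnknown, err
	}
	return status, nil
}

// inMemoryProvider is a tiny fake used only to make the sketch runnable.
type inMemoryProvider struct{ statuses map[string]DeployStatus }

func (p inMemoryProvider) GetDeployStatusByHash(_ context.Context, _ string, hash string) (DeployStatus, error) {
	if s, ok := p.statuses[hash]; ok {
		return s, nil
	}
	return DeployStatusUnknown, nil
}

func main() {
	p := inMemoryProvider{statuses: map[string]DeployStatus{"org-1abc123": "deployed"}}
	status, _ := reconcileDeployStatus(context.Background(), p, "org-1", "abc123")
	fmt.Println(status) // deployed
}
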
@@ -180,6 +180,12 @@ func (m *Manager) ReportConfigDeploymentStatus(
	}
}

// Implements model.AgentConfigProvider
func (m *Manager) GetDeployStatusByHash(ctx context.Context, orgId valuer.UUID, configHash string) (opamptypes.DeployStatus, error) {
	return m.Repo.GetDeployStatusByHash(ctx, orgId, configHash)
}

func GetLatestVersion(
	ctx context.Context, orgId valuer.UUID, elementType opamptypes.ElementType,
) (*opamptypes.AgentConfigVersion, error) {

@@ -1,6 +1,8 @@
package opamp

import "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
import (
	"github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
)

// Interface for a source of otel collector config recommendations.
type AgentConfigProvider interface {
@@ -5,6 +5,7 @@ import (
	"log"
	"net"

	"github.com/SigNoz/signoz/pkg/types/opamptypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/google/uuid"
	"github.com/knadh/koanf"
@@ -127,6 +128,11 @@ func (ta *MockAgentConfigProvider) HasReportedDeploymentStatus(orgID valuer.UUID
	return exists
}

// AgentConfigProvider interface
func (ta *MockAgentConfigProvider) GetDeployStatusByHash(_ context.Context, _ valuer.UUID, _ string) (opamptypes.DeployStatus, error) {
	return opamptypes.DeployStatusUnknown, nil
}

// AgentConfigProvider interface
func (ta *MockAgentConfigProvider) SubscribeToConfigUpdates(callback func()) func() {
	subscriberId := uuid.NewString()
@@ -111,90 +111,99 @@ func ExtractLbFlag(agentDescr *protobufs.AgentDescription) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (agent *Agent) updateAgentDescription(newStatus *protobufs.AgentToServer) (agentDescrChanged bool) {
|
||||
prevStatus := agent.Status
|
||||
|
||||
if agent.Status == nil {
|
||||
// First time this Agent reports a status, remember it.
|
||||
agent.Status = newStatus
|
||||
agentDescrChanged = true
|
||||
} else {
|
||||
// Not a new Agent. Update the Status.
|
||||
agent.Status.SequenceNum = newStatus.SequenceNum
|
||||
|
||||
// Check what's changed in the AgentDescription.
|
||||
if newStatus.AgentDescription != nil {
|
||||
// If the AgentDescription field is set it means the Agent tells us
|
||||
// something is changed in the field since the last status report
|
||||
// (or this is the first report).
|
||||
// Make full comparison of previous and new descriptions to see if it
|
||||
// really is different.
|
||||
if prevStatus != nil && proto.Equal(prevStatus.AgentDescription, newStatus.AgentDescription) {
|
||||
// Agent description didn't change.
|
||||
agentDescrChanged = false
|
||||
} else {
|
||||
// Yes, the description is different, update it.
|
||||
agent.Status.AgentDescription = newStatus.AgentDescription
|
||||
agentDescrChanged = true
|
||||
}
|
||||
} else {
|
||||
// AgentDescription field is not set, which means description didn't change.
|
||||
agentDescrChanged = false
|
||||
}
|
||||
|
||||
// Update remote config status if it is included and is different from what we have.
|
||||
if newStatus.RemoteConfigStatus != nil &&
|
||||
!proto.Equal(agent.Status.RemoteConfigStatus, newStatus.RemoteConfigStatus) {
|
||||
agent.Status.RemoteConfigStatus = newStatus.RemoteConfigStatus
|
||||
|
||||
// todo: need to address multiple agent scenario here
|
||||
// for now, the first response will be sent back to the UI
|
||||
if agent.Status.RemoteConfigStatus.Status == protobufs.RemoteConfigStatuses_RemoteConfigStatuses_APPLIED {
|
||||
onConfigSuccess(agent.OrgID, agent.AgentID, string(agent.Status.RemoteConfigStatus.LastRemoteConfigHash))
|
||||
}
|
||||
|
||||
if agent.Status.RemoteConfigStatus.Status == protobufs.RemoteConfigStatuses_RemoteConfigStatuses_FAILED {
|
||||
onConfigFailure(agent.OrgID, agent.AgentID, string(agent.Status.RemoteConfigStatus.LastRemoteConfigHash), agent.Status.RemoteConfigStatus.ErrorMessage)
|
||||
}
|
||||
}
|
||||
// agentDescriptionChanged returns true when the agent sends updated properties
|
||||
// (e.g. capability flag, version) mid-connection, signalling the server to
|
||||
// recompute and push a new RemoteConfig.
|
||||
//
|
||||
// On reconnect this always returns false: handleFirstStatus pre-copies
|
||||
// AgentDescription into agent.Status so no diff is detected, avoiding a
|
||||
// redundant config recompute.
|
||||
func (agent *Agent) agentDescriptionChanged(newStatus *protobufs.AgentToServer) bool {
|
||||
// nil AgentDescription means no change per OpAMP protocol.
|
||||
if newStatus.AgentDescription == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
if agentDescrChanged {
|
||||
agent.CanLB = ExtractLbFlag(newStatus.AgentDescription)
|
||||
if proto.Equal(agent.Status.AgentDescription, newStatus.AgentDescription) {
|
||||
return false
|
||||
}
|
||||
|
||||
return agentDescrChanged
|
||||
agent.CanLB = ExtractLbFlag(newStatus.AgentDescription)
|
||||
return true
|
||||
}
|
||||
|
||||
func (agent *Agent) updateHealth(newStatus *protobufs.AgentToServer) {
|
||||
if newStatus.Health == nil {
|
||||
// updateRemoteConfigStatus updates the stored RemoteConfigStatus and notifies
|
||||
// subscribers if the status has changed relative to what we have stored.
|
||||
func (agent *Agent) updateRemoteConfigStatus(newStatus *protobufs.AgentToServer) {
|
||||
if newStatus.RemoteConfigStatus == nil ||
|
||||
proto.Equal(agent.Status.RemoteConfigStatus, newStatus.RemoteConfigStatus) {
|
||||
return
|
||||
}
|
||||
|
||||
agent.Status.Health = newStatus.Health
|
||||
|
||||
if agent.Status != nil && agent.Status.Health != nil && agent.Status.Health.Healthy {
|
||||
agent.TimeAuditable.UpdatedAt = time.Unix(0, int64(agent.Status.Health.StartTimeUnixNano)).UTC()
|
||||
// todo: need to address multiple agent scenario here
|
||||
// for now, the first response will be sent back to the UI
|
||||
hash := string(newStatus.RemoteConfigStatus.LastRemoteConfigHash)
|
||||
switch newStatus.RemoteConfigStatus.Status {
|
||||
case protobufs.RemoteConfigStatuses_RemoteConfigStatuses_APPLIED:
|
||||
onConfigSuccess(agent.OrgID, agent.AgentID, hash)
|
||||
case protobufs.RemoteConfigStatuses_RemoteConfigStatuses_FAILED:
|
||||
onConfigFailure(agent.OrgID, agent.AgentID, hash, newStatus.RemoteConfigStatus.ErrorMessage)
|
||||
}
|
||||
}
|
||||
|
||||
func (agent *Agent) updateRemoteConfigStatus(newStatus *protobufs.AgentToServer) {
|
||||
// Update remote config status if it is included and is different from what we have.
|
||||
if newStatus.RemoteConfigStatus != nil {
|
||||
agent.Status.RemoteConfigStatus = newStatus.RemoteConfigStatus
|
||||
// handleFirstStatus initializes agent.Status on the first message received from
|
||||
// this agent instance. It is a no-op for all subsequent messages.
|
||||
func (agent *Agent) handleFirstStatus(newStatus *protobufs.AgentToServer, configProvider AgentConfigProvider) {
|
||||
if agent.Status != nil {
|
||||
return
|
||||
}
|
||||
|
||||
// Initialize with a clean slate.
|
||||
agent.Status = &protobufs.AgentToServer{
|
||||
RemoteConfigStatus: &protobufs.RemoteConfigStatus{
|
||||
Status: protobufs.RemoteConfigStatuses_RemoteConfigStatuses_UNSET,
|
||||
},
|
||||
}
|
||||
|
||||
if newStatus.RemoteConfigStatus == nil ||
|
||||
newStatus.RemoteConfigStatus.Status == protobufs.RemoteConfigStatuses_RemoteConfigStatuses_UNSET {
|
||||
// Agent just started fresh — no prior deployment to reconcile with the DB.
|
||||
return
|
||||
}
|
||||
|
||||
// Since the server's connection was restarted,
// copy the agent description so no change is detected by agentDescriptionChanged
|
||||
agent.Status.AgentDescription = newStatus.AgentDescription
|
||||
|
||||
// Server reconnected while the agent was already running.
|
||||
// Reconcile deployment status with DB; DB is the source of truth.
|
||||
// If DB says in_progress but agent now reports APPLIED/FAILED,
|
||||
// updateRemoteConfigStatus will detect the transition and notify subscribers.
|
||||
rawHash := string(newStatus.RemoteConfigStatus.LastRemoteConfigHash)
|
||||
deployStatus, err := configProvider.GetDeployStatusByHash(context.Background(), agent.OrgID, agent.OrgID.String()+rawHash)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
agent.Status.RemoteConfigStatus.Status = opamptypes.DeployStatusToProtoStatus[deployStatus]
|
||||
|
||||
// If the deployment is still in-flight, rehydrate the subscriber so that
|
||||
// updateRemoteConfigStatus can fire onConfigSuccess/onConfigFailure when
|
||||
// the agent next reports a terminal status.
|
||||
if deployStatus != opamptypes.Deployed && deployStatus != opamptypes.DeployFailed {
|
||||
ListenToConfigUpdate(agent.OrgID, agent.AgentID, rawHash, configProvider.ReportConfigDeploymentStatus)
|
||||
}
|
||||
}
|
||||
|
||||
func (agent *Agent) updateStatusField(newStatus *protobufs.AgentToServer) (agentDescrChanged bool) {
|
||||
if agent.Status == nil {
|
||||
// First time this Agent reports a status, remember it.
|
||||
agent.Status = newStatus
|
||||
agentDescrChanged = true
|
||||
func (agent *Agent) updateStatusField(newStatus *protobufs.AgentToServer, configProvider AgentConfigProvider) bool {
|
||||
agent.handleFirstStatus(newStatus, configProvider)
|
||||
agentDescrChanged := agent.agentDescriptionChanged(newStatus)
|
||||
// record healthy timestamp
|
||||
if newStatus.Health != nil && newStatus.Health.Healthy {
|
||||
agent.TimeAuditable.UpdatedAt = time.Unix(0, int64(newStatus.Health.StartTimeUnixNano)).UTC()
|
||||
}
|
||||
|
||||
agentDescrChanged = agent.updateAgentDescription(newStatus) || agentDescrChanged
|
||||
// notify subscribers first; this will update the status in the DB
|
||||
agent.updateRemoteConfigStatus(newStatus)
|
||||
agent.updateHealth(newStatus)
|
||||
// update local reference in last.
|
||||
agent.Status = newStatus
|
||||
return agentDescrChanged
|
||||
}
|
||||
|
||||
@@ -237,7 +246,7 @@ func (agent *Agent) processStatusUpdate(
|
||||
// current status is not up-to-date.
|
||||
lostPreviousUpdate := (agent.Status == nil) || (agent.Status != nil && agent.Status.SequenceNum+1 != newStatus.SequenceNum)
|
||||
|
||||
agentDescrChanged := agent.updateStatusField(newStatus)
|
||||
agentDescrChanged := agent.updateStatusField(newStatus, configProvider)
|
||||
|
||||
// Check if any fields were omitted in the status report.
|
||||
effectiveConfigOmitted := newStatus.EffectiveConfig == nil &&
|
||||
|
||||
@@ -1,6 +1,11 @@
package model

import "github.com/SigNoz/signoz/pkg/valuer"
import (
	"context"

	"github.com/SigNoz/signoz/pkg/types/opamptypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

// Interface for source of otel collector config recommendations.
type AgentConfigProvider interface {
@@ -20,4 +25,10 @@ type AgentConfigProvider interface {
		configId string,
		err error,
	)

	// GetDeployStatusByHash returns the DeployStatus for the given config hash
	// (with orgId prefix as stored in the DB). Returns DeployStatusUnknown when
	// no matching row exists. Used by the agent's first-connect handler to
	// determine whether the reported RemoteConfigStatus resolves a pending deployment.
	GetDeployStatusByHash(ctx context.Context, orgId valuer.UUID, configHash string) (opamptypes.DeployStatus, error)
}

@@ -66,6 +66,7 @@ func ListenToConfigUpdate(orgId valuer.UUID, agentId string, hash string, ss OnC
	defer coordinator.mutex.Unlock()

	key := getSubscriberKey(orgId, hash)

	if subs, ok := coordinator.subscribers[key]; ok {
		subs = append(subs, ss)
		coordinator.subscribers[key] = subs
@@ -76,6 +76,21 @@ func TestManager_TestNotification_SendUnmatched_ThresholdRule(t *testing.T) {
|
||||
alertDataRows := cmock.NewRows(cols, tc.Values)
|
||||
|
||||
mock := mockStore.Mock()
|
||||
// Mock metadata queries for FetchTemporalityAndTypeMulti
|
||||
// First query: fetchMetricsTemporalityAndType (from signoz_metrics time series table)
|
||||
metadataCols := []cmock.ColumnType{
|
||||
{Name: "metric_name", Type: "String"},
|
||||
{Name: "temporality", Type: "String"},
|
||||
{Name: "type", Type: "String"},
|
||||
{Name: "is_monotonic", Type: "Bool"},
|
||||
}
|
||||
metadataRows := cmock.NewRows(metadataCols, [][]any{
|
||||
{"probe_success", metrictypes.Unspecified, metrictypes.GaugeType, false},
|
||||
})
|
||||
mock.ExpectQuery("*distributed_time_series_v4*").WithArgs(nil, nil, nil).WillReturnRows(metadataRows)
|
||||
// Second query: fetchMeterSourceMetricsTemporalityAndType (from signoz_meter table)
|
||||
emptyMetadataRows := cmock.NewRows(metadataCols, [][]any{})
|
||||
mock.ExpectQuery("*meter*").WithArgs(nil).WillReturnRows(emptyMetadataRows)
|
||||
|
||||
// Generate query arguments for the metric query
|
||||
evalTime := time.Now().UTC()
|
||||
|
||||
@@ -7,12 +7,14 @@ import (
	"sync"
	"time"

	"log/slog"

	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/types/ctxtypes"
	ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	opentracing "github.com/opentracing/opentracing-go"
	plabels "github.com/prometheus/prometheus/model/labels"
	"log/slog"
)

// PromRuleTask is a promql rule executor
@@ -371,7 +373,7 @@ func (g *PromRuleTask) Eval(ctx context.Context, ts time.Time) {

	comment := ctxtypes.CommentFromContext(ctx)
	comment.Set("rule_id", rule.ID())
	comment.Set("auth_type", "internal")
	comment.Set("identn_provider", authtypes.IdentNProviderInternal.StringValue())
	ctx = ctxtypes.NewContextWithComment(ctx, comment)

	_, err := rule.Eval(ctx, ts)

@@ -10,6 +10,7 @@ import (
	"log/slog"

	"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/types/ctxtypes"
	ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
@@ -358,7 +359,7 @@ func (g *RuleTask) Eval(ctx context.Context, ts time.Time) {

	comment := ctxtypes.CommentFromContext(ctx)
	comment.Set("rule_id", rule.ID())
	comment.Set("auth_type", "internal")
	comment.Set("identn_provider", authtypes.IdentNProviderInternal.StringValue())
	ctx = ctxtypes.NewContextWithComment(ctx, comment)

	_, err := rule.Eval(ctx, ts)
@@ -7,7 +7,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/sqlschema"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types/roletypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/uptrace/bun"
|
||||
"github.com/uptrace/bun/migrate"
|
||||
@@ -54,7 +54,7 @@ func (migration *addManagedRoles) Up(ctx context.Context, db *bun.DB) error {
|
||||
return err
|
||||
}
|
||||
|
||||
managedRoles := []*roletypes.StorableRole{}
|
||||
managedRoles := []*authtypes.StorableRole{}
|
||||
for _, orgIDStr := range orgIDs {
|
||||
orgID, err := valuer.NewUUID(orgIDStr)
|
||||
if err != nil {
|
||||
@@ -62,20 +62,20 @@ func (migration *addManagedRoles) Up(ctx context.Context, db *bun.DB) error {
|
||||
}
|
||||
|
||||
// signoz admin
|
||||
signozAdminRole := roletypes.NewRole(roletypes.SigNozAdminRoleName, roletypes.SigNozAdminRoleDescription, roletypes.RoleTypeManaged, orgID)
|
||||
managedRoles = append(managedRoles, roletypes.NewStorableRoleFromRole(signozAdminRole))
|
||||
signozAdminRole := authtypes.NewRole(authtypes.SigNozAdminRoleName, authtypes.SigNozAdminRoleDescription, authtypes.RoleTypeManaged, orgID)
|
||||
managedRoles = append(managedRoles, authtypes.NewStorableRoleFromRole(signozAdminRole))
|
||||
|
||||
// signoz editor
|
||||
signozEditorRole := roletypes.NewRole(roletypes.SigNozEditorRoleName, roletypes.SigNozEditorRoleDescription, roletypes.RoleTypeManaged, orgID)
|
||||
managedRoles = append(managedRoles, roletypes.NewStorableRoleFromRole(signozEditorRole))
|
||||
signozEditorRole := authtypes.NewRole(authtypes.SigNozEditorRoleName, authtypes.SigNozEditorRoleDescription, authtypes.RoleTypeManaged, orgID)
|
||||
managedRoles = append(managedRoles, authtypes.NewStorableRoleFromRole(signozEditorRole))
|
||||
|
||||
// signoz viewer
|
||||
signozViewerRole := roletypes.NewRole(roletypes.SigNozViewerRoleName, roletypes.SigNozViewerRoleDescription, roletypes.RoleTypeManaged, orgID)
|
||||
managedRoles = append(managedRoles, roletypes.NewStorableRoleFromRole(signozViewerRole))
|
||||
signozViewerRole := authtypes.NewRole(authtypes.SigNozViewerRoleName, authtypes.SigNozViewerRoleDescription, authtypes.RoleTypeManaged, orgID)
|
||||
managedRoles = append(managedRoles, authtypes.NewStorableRoleFromRole(signozViewerRole))
|
||||
|
||||
// signoz anonymous
|
||||
signozAnonymousRole := roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID)
|
||||
managedRoles = append(managedRoles, roletypes.NewStorableRoleFromRole(signozAnonymousRole))
|
||||
signozAnonymousRole := authtypes.NewRole(authtypes.SigNozAnonymousRoleName, authtypes.SigNozAnonymousRoleDescription, authtypes.RoleTypeManaged, orgID)
|
||||
managedRoles = append(managedRoles, authtypes.NewStorableRoleFromRole(signozAnonymousRole))
|
||||
}
|
||||
|
||||
if len(managedRoles) > 0 {
|
||||
|
||||
@@ -6,7 +6,7 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types/roletypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/oklog/ulid/v2"
|
||||
"github.com/uptrace/bun"
|
||||
"github.com/uptrace/bun/dialect"
|
||||
@@ -83,7 +83,7 @@ func (migration *addAnonymousPublicDashboardTransaction) Up(ctx context.Context,
|
||||
INSERT INTO tuple (store, object_type, object_id, relation, _user, user_type, ulid, inserted_at)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
||||
ON CONFLICT (store, object_type, object_id, relation, _user) DO NOTHING`,
|
||||
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role:organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName+"#assignee", "userset", tupleID, now,
|
||||
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role:organization/"+orgID+"/role/"+authtypes.SigNozAnonymousRoleName+"#assignee", "userset", tupleID, now,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -102,7 +102,7 @@ func (migration *addAnonymousPublicDashboardTransaction) Up(ctx context.Context,
|
||||
INSERT INTO changelog (store, object_type, object_id, relation, _user, operation, ulid, inserted_at)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
||||
ON CONFLICT (store, ulid, object_type) DO NOTHING`,
|
||||
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role:organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName+"#assignee", "TUPLE_OPERATION_WRITE", tupleID, now,
|
||||
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role:organization/"+orgID+"/role/"+authtypes.SigNozAnonymousRoleName+"#assignee", "TUPLE_OPERATION_WRITE", tupleID, now,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -113,7 +113,7 @@ func (migration *addAnonymousPublicDashboardTransaction) Up(ctx context.Context,
|
||||
INSERT INTO tuple (store, object_type, object_id, relation, user_object_type, user_object_id, user_relation, user_type, ulid, inserted_at)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
ON CONFLICT (store, object_type, object_id, relation, user_object_type, user_object_id, user_relation) DO NOTHING`,
|
||||
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role", "organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName, "assignee", "userset", tupleID, now,
|
||||
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role", "organization/"+orgID+"/role/"+authtypes.SigNozAnonymousRoleName, "assignee", "userset", tupleID, now,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -132,7 +132,7 @@ func (migration *addAnonymousPublicDashboardTransaction) Up(ctx context.Context,
|
||||
INSERT INTO changelog (store, object_type, object_id, relation, user_object_type, user_object_id, user_relation, operation, ulid, inserted_at)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
ON CONFLICT (store, ulid, object_type) DO NOTHING`,
|
||||
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role", "organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName, "assignee", 0, tupleID, now,
|
||||
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role", "organization/"+orgID+"/role/"+authtypes.SigNozAnonymousRoleName, "assignee", 0, tupleID, now,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
|
||||
@@ -1928,3 +1928,37 @@ func (t *telemetryMetaStore) GetFirstSeenFromMetricMetadata(ctx context.Context,

	return result, nil
}

func (t *telemetryMetaStore) FetchLastSeenInfoMulti(ctx context.Context, metricNames ...string) (map[string]int64, error) {
	sb := sqlbuilder.Select(
		"metric_name",
		"max(unix_milli)",
	).
		From(t.metricsDBName + "." + telemetrymetrics.TimeseriesV4TableName)
	sb.Where(sb.In("metric_name", metricNames))
	sb.GroupBy("metric_name")

	query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)

	t.logger.DebugContext(ctx, "fetching metric last seen timestamp", "query", query, "args", args)

	rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
	if err != nil {
		return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to fetch metric last seen info")
	}
	defer rows.Close()

	lastSeenInfo := make(map[string]int64)
	for rows.Next() {
		var metricName string
		var unix_milli int64
		if err := rows.Scan(&metricName, &unix_milli); err != nil {
			return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to scan last seen info result")
		}
		lastSeenInfo[metricName] = unix_milli
	}
	if err := rows.Err(); err != nil {
		return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error iterating over metric last seen rows")
	}
	return lastSeenInfo, nil
}
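
FetchLastSeenInfoMulti builds a grouped max(unix_milli) statement with go-sqlbuilder and the ClickHouse flavor. A small sketch of the same construction against a placeholder table name, spreading the metric names explicitly into the IN clause; the printed SQL shown in the comment is approximate:

package main

import (
	"fmt"

	"github.com/huandu/go-sqlbuilder"
)

func main() {
	metricNames := []string{"http_requests_total", "queue_depth"}

	// Same construction as FetchLastSeenInfoMulti, against a placeholder table name.
	sb := sqlbuilder.Select("metric_name", "max(unix_milli)").
		From("signoz_metrics.distributed_time_series_v4")

	// Spread the names explicitly so each one becomes its own argument of the IN clause.
	vals := make([]interface{}, 0, len(metricNames))
	for _, name := range metricNames {
		vals = append(vals, name)
	}
	sb.Where(sb.In("metric_name", vals...))
	sb.GroupBy("metric_name")

	query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	fmt.Println(query) // roughly: SELECT metric_name, max(unix_milli) FROM ... WHERE metric_name IN (?, ?) GROUP BY metric_name
	fmt.Println(args)
}
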
@@ -6,6 +6,7 @@ var (
	IdentNProviderTokenizer = IdentNProvider{valuer.NewString("tokenizer")}
	IdentNProviderAPIkey = IdentNProvider{valuer.NewString("api_key")}
	IdentNProviderAnonymous = IdentNProvider{valuer.NewString("anonymous")}
	IdentNProviderInternal = IdentNProvider{valuer.NewString("internal")}
)

type IdentNProvider struct{ valuer.String }
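
IdentNProviderInternal is the value the rule tasks above now stamp onto the query context comment under the identn_provider key, replacing the old auth_type entry. A minimal sketch of how such a key/value comment map can ride on a context; the comment type below is a simplified stand-in for the real ctxtypes helpers:

package main

import (
	"context"
	"fmt"
)

// comment is a simplified stand-in for the ctxtypes comment map used in the diff.
type comment map[string]string

type commentKey struct{}

// commentFromContext returns the comment map stored on the context, or an empty one.
func commentFromContext(ctx context.Context) comment {
	if c, ok := ctx.Value(commentKey{}).(comment); ok {
		return c
	}
	return comment{}
}

// newContextWithComment stores the comment map back on the context.
func newContextWithComment(ctx context.Context, c comment) context.Context {
	return context.WithValue(ctx, commentKey{}, c)
}

func main() {
	ctx := context.Background()

	// Mirrors the rule-task change: tag internal evaluations with an identn provider.
	c := commentFromContext(ctx)
	c["rule_id"] = "rule-123"
	c["identn_provider"] = "internal" // value of authtypes.IdentNProviderInternal in the diff
	ctx = newContextWithComment(ctx, c)

	fmt.Println(commentFromContext(ctx)) // map[identn_provider:internal rule_id:rule-123]
}
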
@@ -1,13 +1,13 @@
|
||||
package roletypes
|
||||
package authtypes
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"regexp"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
openfgav1 "github.com/openfga/api/proto/openfga/v1"
|
||||
"github.com/uptrace/bun"
|
||||
@@ -51,7 +51,7 @@ var (
|
||||
)
|
||||
|
||||
var (
|
||||
TypeableResourcesRoles = authtypes.MustNewTypeableMetaResources(authtypes.MustNewName("roles"))
|
||||
TypeableResourcesRoles = MustNewTypeableMetaResources(MustNewName("roles"))
|
||||
)
|
||||
|
||||
type StorableRole struct {
|
||||
@@ -194,20 +194,20 @@ func (role *PatchableRole) UnmarshalJSON(data []byte) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func GetAdditionTuples(name string, orgID valuer.UUID, relation authtypes.Relation, additions []*authtypes.Object) ([]*openfgav1.TupleKey, error) {
|
||||
func GetAdditionTuples(name string, orgID valuer.UUID, relation Relation, additions []*Object) ([]*openfgav1.TupleKey, error) {
|
||||
tuples := make([]*openfgav1.TupleKey, 0)
|
||||
|
||||
for _, object := range additions {
|
||||
typeable := authtypes.MustNewTypeableFromType(object.Resource.Type, object.Resource.Name)
|
||||
typeable := MustNewTypeableFromType(object.Resource.Type, object.Resource.Name)
|
||||
transactionTuples, err := typeable.Tuples(
|
||||
authtypes.MustNewSubject(
|
||||
authtypes.TypeableRole,
|
||||
MustNewSubject(
|
||||
TypeableRole,
|
||||
name,
|
||||
orgID,
|
||||
&authtypes.RelationAssignee,
|
||||
&RelationAssignee,
|
||||
),
|
||||
relation,
|
||||
[]authtypes.Selector{object.Selector},
|
||||
[]Selector{object.Selector},
|
||||
orgID,
|
||||
)
|
||||
if err != nil {
|
||||
@@ -220,20 +220,20 @@ func GetAdditionTuples(name string, orgID valuer.UUID, relation authtypes.Relati
|
||||
return tuples, nil
|
||||
}
|
||||
|
||||
func GetDeletionTuples(name string, orgID valuer.UUID, relation authtypes.Relation, deletions []*authtypes.Object) ([]*openfgav1.TupleKey, error) {
|
||||
func GetDeletionTuples(name string, orgID valuer.UUID, relation Relation, deletions []*Object) ([]*openfgav1.TupleKey, error) {
|
||||
tuples := make([]*openfgav1.TupleKey, 0)
|
||||
|
||||
for _, object := range deletions {
|
||||
typeable := authtypes.MustNewTypeableFromType(object.Resource.Type, object.Resource.Name)
|
||||
typeable := MustNewTypeableFromType(object.Resource.Type, object.Resource.Name)
|
||||
transactionTuples, err := typeable.Tuples(
|
||||
authtypes.MustNewSubject(
|
||||
authtypes.TypeableRole,
|
||||
MustNewSubject(
|
||||
TypeableRole,
|
||||
name,
|
||||
orgID,
|
||||
&authtypes.RelationAssignee,
|
||||
&RelationAssignee,
|
||||
),
|
||||
relation,
|
||||
[]authtypes.Selector{object.Selector},
|
||||
[]Selector{object.Selector},
|
||||
orgID,
|
||||
)
|
||||
if err != nil {
|
||||
@@ -254,3 +254,15 @@ func MustGetSigNozManagedRoleFromExistingRole(role types.Role) string {
|
||||
|
||||
return managedRole
|
||||
}
|
||||
|
||||
type RoleStore interface {
|
||||
Create(context.Context, *StorableRole) error
|
||||
Get(context.Context, valuer.UUID, valuer.UUID) (*StorableRole, error)
|
||||
GetByOrgIDAndName(context.Context, valuer.UUID, string) (*StorableRole, error)
|
||||
List(context.Context, valuer.UUID) ([]*StorableRole, error)
|
||||
ListByOrgIDAndNames(context.Context, valuer.UUID, []string) ([]*StorableRole, error)
|
||||
ListByOrgIDAndIDs(context.Context, valuer.UUID, []valuer.UUID) ([]*StorableRole, error)
|
||||
Update(context.Context, valuer.UUID, *StorableRole) error
|
||||
Delete(context.Context, valuer.UUID, valuer.UUID) error
|
||||
RunInTx(context.Context, func(ctx context.Context) error) error
|
||||
}
|
||||
43
pkg/types/cloudintegrationtypes/account.go
Normal file
@@ -0,0 +1,43 @@
|
||||
package cloudintegrationtypes
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type Account struct {
|
||||
types.Identifiable
|
||||
types.TimeAuditable
|
||||
ProviderAccountId *string `json:"providerAccountID,omitempty"`
|
||||
Provider CloudProviderType `json:"provider"`
|
||||
RemovedAt *time.Time `json:"removedAt,omitempty"`
|
||||
AgentReport *AgentReport `json:"agentReport,omitempty"`
|
||||
OrgID valuer.UUID `json:"orgID"`
|
||||
Config *AccountConfig `json:"config,omitempty"`
|
||||
}
|
||||
|
||||
// AgentReport represents heartbeats sent by the agent.
|
||||
type AgentReport struct {
|
||||
TimestampMillis int64 `json:"timestampMillis"`
|
||||
Data map[string]any `json:"data"`
|
||||
}
|
||||
|
||||
type GettableAccounts struct {
|
||||
Accounts []*Account `json:"accounts"`
|
||||
}
|
||||
|
||||
type GettableAccount = Account
|
||||
|
||||
type UpdatableAccount struct {
|
||||
Config *AccountConfig `json:"config"`
|
||||
}
|
||||
|
||||
type AccountConfig struct {
|
||||
AWS *AWSAccountConfig `json:"aws,omitempty"`
|
||||
}
|
||||
|
||||
type AWSAccountConfig struct {
|
||||
Regions []string `json:"regions"`
|
||||
}
|
||||
80
pkg/types/cloudintegrationtypes/cloudintegration.go
Normal file
@@ -0,0 +1,80 @@
|
||||
package cloudintegrationtypes
|
||||
|
||||
import (
|
||||
"database/sql/driver"
|
||||
"encoding/json"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/uptrace/bun"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrCodeCloudIntegrationNotFound = errors.MustNewCode("cloud_integration_not_found")
|
||||
)
|
||||
|
||||
// StorableCloudIntegration represents a cloud integration stored in the database.
|
||||
// This is also referred to as an "Account" in the context of cloud integrations.
|
||||
type StorableCloudIntegration struct {
|
||||
bun.BaseModel `bun:"table:cloud_integration"`
|
||||
types.Identifiable
|
||||
types.TimeAuditable
|
||||
|
||||
Provider CloudProviderType `bun:"provider,type:text"`
|
||||
Config string `bun:"config,type:text"` // Config is provider-specific data in JSON string format
|
||||
AccountID *string `bun:"account_id,type:text"`
|
||||
LastAgentReport *StorableAgentReport `bun:"last_agent_report,type:text"`
|
||||
RemovedAt *time.Time `bun:"removed_at,type:timestamp,nullzero"`
|
||||
OrgID valuer.UUID `bun:"org_id,type:text"`
|
||||
}
|
||||
|
||||
// StorableAgentReport represents the last heartbeat and arbitrary data sent by the agent
|
||||
// as of now there is no use case for the Data field, but it is kept for backwards compatibility with the older structure.
|
||||
type StorableAgentReport struct {
|
||||
TimestampMillis int64 `json:"timestamp_millis"` // backward compatibility
|
||||
Data map[string]any `json:"data"`
|
||||
}
|
||||
|
||||
// StorableCloudIntegrationService is to store service config for a cloud integration, which is a cloud provider specific configuration.
|
||||
type StorableCloudIntegrationService struct {
|
||||
bun.BaseModel `bun:"table:cloud_integration_service,alias:cis"`
|
||||
types.Identifiable
|
||||
types.TimeAuditable
|
||||
|
||||
Type ServiceID `bun:"type,type:text,notnull"` // Keeping Type field name as is, but it is a service id
|
||||
Config string `bun:"config,type:text"` // Config is cloud provider's service specific data in JSON string format
|
||||
CloudIntegrationID valuer.UUID `bun:"cloud_integration_id,type:text"`
|
||||
}
|
||||
|
||||
// Scan scans value from DB.
|
||||
func (r *StorableAgentReport) Scan(src any) error {
|
||||
var data []byte
|
||||
switch v := src.(type) {
|
||||
case []byte:
|
||||
data = v
|
||||
case string:
|
||||
data = []byte(v)
|
||||
default:
|
||||
return errors.NewInternalf(errors.CodeInternal, "tried to scan from %T instead of string or bytes", src)
|
||||
}
|
||||
return json.Unmarshal(data, r)
|
||||
}
|
||||
|
||||
// Value creates value to be stored in DB.
|
||||
func (r *StorableAgentReport) Value() (driver.Value, error) {
|
||||
if r == nil {
|
||||
return nil, errors.NewInternalf(errors.CodeInternal, "agent report is nil")
|
||||
}
|
||||
|
||||
serialized, err := json.Marshal(r)
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(
|
||||
err, errors.CodeInternal, "couldn't serialize agent report to JSON",
|
||||
)
|
||||
}
|
||||
// Return as string instead of []byte to ensure PostgreSQL stores as text, not bytes
|
||||
return string(serialized), nil
|
||||
}
|
||||
41
pkg/types/cloudintegrationtypes/cloudprovider.go
Normal file
@@ -0,0 +1,41 @@
|
||||
package cloudintegrationtypes
|
||||
|
||||
import (
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
// CloudProviderType type alias.
|
||||
type CloudProviderType struct{ valuer.String }
|
||||
|
||||
var (
|
||||
// cloud providers.
|
||||
CloudProviderTypeAWS = CloudProviderType{valuer.NewString("aws")}
|
||||
CloudProviderTypeAzure = CloudProviderType{valuer.NewString("azure")}
|
||||
|
||||
// errors.
|
||||
ErrCodeCloudProviderInvalidInput = errors.MustNewCode("invalid_cloud_provider")
|
||||
|
||||
AWSIntegrationUserEmail = valuer.MustNewEmail("aws-integration@signoz.io")
|
||||
AzureIntegrationUserEmail = valuer.MustNewEmail("azure-integration@signoz.io")
|
||||
)
|
||||
|
||||
// CloudIntegrationUserEmails is the list of valid emails for Cloud One Click integrations.
|
||||
// This is used for validation and restrictions in different contexts, across codebase.
|
||||
var CloudIntegrationUserEmails = []valuer.Email{
|
||||
AWSIntegrationUserEmail,
|
||||
AzureIntegrationUserEmail,
|
||||
}
|
||||
|
||||
// NewCloudProvider returns a new CloudProviderType from a string.
|
||||
// It validates the input and returns an error if the input is not a valid cloud provider.
|
||||
func NewCloudProvider(provider string) (CloudProviderType, error) {
|
||||
switch provider {
|
||||
case CloudProviderTypeAWS.StringValue():
|
||||
return CloudProviderTypeAWS, nil
|
||||
case CloudProviderTypeAzure.StringValue():
|
||||
return CloudProviderTypeAzure, nil
|
||||
default:
|
||||
return CloudProviderType{}, errors.NewInvalidInputf(ErrCodeCloudProviderInvalidInput, "invalid cloud provider: %s", provider)
|
||||
}
|
||||
}
|
||||
88
pkg/types/cloudintegrationtypes/connection.go
Normal file
@@ -0,0 +1,88 @@
|
||||
package cloudintegrationtypes
|
||||
|
||||
import "github.com/SigNoz/signoz/pkg/types/integrationtypes"
|
||||
|
||||
type ConnectionArtifactRequest struct {
|
||||
Aws *AWSConnectionArtifactRequest `json:"aws"`
|
||||
}
|
||||
|
||||
type AWSConnectionArtifactRequest struct {
|
||||
DeploymentRegion string `json:"deploymentRegion"`
|
||||
Regions []string `json:"regions"`
|
||||
}
|
||||
|
||||
type PostableConnectionArtifact = ConnectionArtifactRequest
|
||||
|
||||
type ConnectionArtifact struct {
|
||||
Aws *AWSConnectionArtifact `json:"aws"`
|
||||
}
|
||||
|
||||
type AWSConnectionArtifact struct {
|
||||
ConnectionUrl string `json:"connectionURL"`
|
||||
}
|
||||
|
||||
type GettableConnectionArtifact = ConnectionArtifact
|
||||
|
||||
type AccountStatus struct {
|
||||
Id string `json:"id"`
|
||||
ProviderAccountId *string `json:"providerAccountID,omitempty"`
|
||||
Status integrationtypes.AccountStatus `json:"status"`
|
||||
}
|
||||
|
||||
type GettableAccountStatus = AccountStatus
|
||||
|
||||
type AgentCheckInRequest struct {
|
||||
// older backward compatible fields are mapped to new fields
|
||||
// CloudIntegrationId string `json:"cloudIntegrationId"`
|
||||
// AccountId string `json:"accountId"`
|
||||
|
||||
// New fields
|
||||
ProviderAccountId string `json:"providerAccountId"`
|
||||
CloudAccountId string `json:"cloudAccountId"`
|
||||
|
||||
Data map[string]any `json:"data,omitempty"`
|
||||
}
|
||||
|
||||
type PostableAgentCheckInRequest struct {
|
||||
AgentCheckInRequest
|
||||
// following are backward compatible fields for older running agents
|
||||
// which gets mapped to new fields in AgentCheckInRequest
|
||||
CloudIntegrationId string `json:"cloud_integration_id"`
|
||||
CloudAccountId string `json:"cloud_account_id"`
|
||||
}
|
||||
|
||||
type GettableAgentCheckInResponse struct {
|
||||
AgentCheckInResponse
|
||||
|
||||
// For backward compatibility
|
||||
CloudIntegrationId string `json:"cloud_integration_id"`
|
||||
AccountId string `json:"account_id"`
|
||||
}
|
||||
|
||||
type AgentCheckInResponse struct {
|
||||
// Older fields for backward compatibility are mapped to new fields below
|
||||
// CloudIntegrationId string `json:"cloud_integration_id"`
|
||||
// AccountId string `json:"account_id"`
|
||||
|
||||
// New fields
|
||||
ProviderAccountId string `json:"providerAccountId"`
|
||||
CloudAccountId string `json:"cloudAccountId"`
|
||||
|
||||
// IntegrationConfig populates data related to integration that is required for an agent
|
||||
// to start collecting telemetry data
|
||||
// keeping JSON key snake_case for backward compatibility
|
||||
IntegrationConfig *IntegrationConfig `json:"integration_config,omitempty"`
|
||||
}
|
||||
|
||||
type IntegrationConfig struct {
|
||||
EnabledRegions []string `json:"enabledRegions"` // backward compatible
|
||||
Telemetry *AWSCollectionStrategy `json:"telemetry,omitempty"` // backward compatible
|
||||
|
||||
// new fields
|
||||
AWS *AWSIntegrationConfig `json:"aws,omitempty"`
|
||||
}
|
||||
|
||||
type AWSIntegrationConfig struct {
|
||||
EnabledRegions []string `json:"enabledRegions"`
|
||||
Telemetry *AWSCollectionStrategy `json:"telemetry,omitempty"`
|
||||
}
|
||||
103
pkg/types/cloudintegrationtypes/regions.go
Normal file
@@ -0,0 +1,103 @@
|
||||
package cloudintegrationtypes
|
||||
|
||||
import (
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrCodeInvalidCloudRegion = errors.MustNewCode("invalid_cloud_region")
|
||||
ErrCodeMismatchCloudProvider = errors.MustNewCode("cloud_provider_mismatch")
|
||||
)
|
||||
|
||||
// List of all valid cloud regions on Amazon Web Services.
|
||||
var ValidAWSRegions = map[string]struct{}{
|
||||
"af-south-1": {}, // Africa (Cape Town).
|
||||
"ap-east-1": {}, // Asia Pacific (Hong Kong).
|
||||
"ap-northeast-1": {}, // Asia Pacific (Tokyo).
|
||||
"ap-northeast-2": {}, // Asia Pacific (Seoul).
|
||||
"ap-northeast-3": {}, // Asia Pacific (Osaka).
|
||||
"ap-south-1": {}, // Asia Pacific (Mumbai).
|
||||
"ap-south-2": {}, // Asia Pacific (Hyderabad).
|
||||
"ap-southeast-1": {}, // Asia Pacific (Singapore).
|
||||
"ap-southeast-2": {}, // Asia Pacific (Sydney).
|
||||
"ap-southeast-3": {}, // Asia Pacific (Jakarta).
|
||||
"ap-southeast-4": {}, // Asia Pacific (Melbourne).
|
||||
"ca-central-1": {}, // Canada (Central).
|
||||
"ca-west-1": {}, // Canada West (Calgary).
|
||||
"eu-central-1": {}, // Europe (Frankfurt).
|
||||
"eu-central-2": {}, // Europe (Zurich).
|
||||
"eu-north-1": {}, // Europe (Stockholm).
|
||||
"eu-south-1": {}, // Europe (Milan).
|
||||
"eu-south-2": {}, // Europe (Spain).
|
||||
"eu-west-1": {}, // Europe (Ireland).
|
||||
"eu-west-2": {}, // Europe (London).
|
||||
"eu-west-3": {}, // Europe (Paris).
|
||||
"il-central-1": {}, // Israel (Tel Aviv).
|
||||
"me-central-1": {}, // Middle East (UAE).
|
||||
"me-south-1": {}, // Middle East (Bahrain).
|
||||
"sa-east-1": {}, // South America (Sao Paulo).
|
||||
"us-east-1": {}, // US East (N. Virginia).
|
||||
"us-east-2": {}, // US East (Ohio).
|
||||
"us-west-1": {}, // US West (N. California).
|
||||
"us-west-2": {}, // US West (Oregon).
|
||||
}
|
||||
|
||||
// List of all valid cloud regions for Microsoft Azure.
|
||||
var ValidAzureRegions = map[string]struct{}{
|
||||
"australiacentral": {}, // Australia Central
|
||||
"australiacentral2": {}, // Australia Central 2
|
||||
"australiaeast": {}, // Australia East
|
||||
"australiasoutheast": {}, // Australia Southeast
|
||||
"austriaeast": {}, // Austria East
|
||||
"belgiumcentral": {}, // Belgium Central
|
||||
"brazilsouth": {}, // Brazil South
|
||||
"brazilsoutheast": {}, // Brazil Southeast
|
||||
"canadacentral": {}, // Canada Central
|
||||
"canadaeast": {}, // Canada East
|
||||
"centralindia": {}, // Central India
|
||||
"centralus": {}, // Central US
|
||||
"chilecentral": {}, // Chile Central
|
||||
"denmarkeast": {}, // Denmark East
|
||||
"eastasia": {}, // East Asia
|
||||
"eastus": {}, // East US
|
||||
"eastus2": {}, // East US 2
|
||||
"francecentral": {}, // France Central
|
||||
"francesouth": {}, // France South
|
||||
"germanynorth": {}, // Germany North
|
||||
"germanywestcentral": {}, // Germany West Central
|
||||
"indonesiacentral": {}, // Indonesia Central
|
||||
"israelcentral": {}, // Israel Central
|
||||
"italynorth": {}, // Italy North
|
||||
"japaneast": {}, // Japan East
|
||||
"japanwest": {}, // Japan West
|
||||
"koreacentral": {}, // Korea Central
|
||||
"koreasouth": {}, // Korea South
|
||||
"malaysiawest": {}, // Malaysia West
|
||||
"mexicocentral": {}, // Mexico Central
|
||||
"newzealandnorth": {}, // New Zealand North
|
||||
"northcentralus": {}, // North Central US
|
||||
"northeurope": {}, // North Europe
|
||||
"norwayeast": {}, // Norway East
|
||||
"norwaywest": {}, // Norway West
|
||||
"polandcentral": {}, // Poland Central
|
||||
"qatarcentral": {}, // Qatar Central
|
||||
"southafricanorth": {}, // South Africa North
|
||||
"southafricawest": {}, // South Africa West
|
||||
"southcentralus": {}, // South Central US
|
||||
"southindia": {}, // South India
|
||||
"southeastasia": {}, // Southeast Asia
|
||||
"spaincentral": {}, // Spain Central
|
||||
"swedencentral": {}, // Sweden Central
|
||||
"switzerlandnorth": {}, // Switzerland North
|
||||
"switzerlandwest": {}, // Switzerland West
|
||||
"uaecentral": {}, // UAE Central
|
||||
"uaenorth": {}, // UAE North
|
||||
"uksouth": {}, // UK South
|
||||
"ukwest": {}, // UK West
|
||||
"westcentralus": {}, // West Central US
|
||||
"westeurope": {}, // West Europe
|
||||
"westindia": {}, // West India
|
||||
"westus": {}, // West US
|
||||
"westus2": {}, // West US 2
|
||||
"westus3": {}, // West US 3
|
||||
}
|
||||
248
pkg/types/cloudintegrationtypes/service.go
Normal file
@@ -0,0 +1,248 @@
|
||||
package cloudintegrationtypes
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
var (
|
||||
S3Sync = valuer.NewString("s3sync")
|
||||
// ErrCodeInvalidServiceID is the error code for invalid service id.
|
||||
ErrCodeInvalidServiceID = errors.MustNewCode("invalid_service_id")
|
||||
)
|
||||
|
||||
type ServiceID struct{ valuer.String }
|
||||
|
||||
type CloudIntegrationService struct {
|
||||
types.Identifiable
|
||||
types.TimeAuditable
|
||||
Type ServiceID `json:"type"`
|
||||
Config *ServiceConfig `json:"config"`
|
||||
CloudIntegrationID valuer.UUID `json:"cloudIntegrationID"`
|
||||
}
|
||||
|
||||
// ServiceMetadata helps to quickly list available services and whether each is enabled.
// As fetching the complete service definition is a heavy operation and the response is also large,
// the initial integration page load can be very slow.
|
||||
type ServiceMetadata struct {
|
||||
ServiceDefinitionMetadata
|
||||
// if the service is enabled for the account
|
||||
Enabled bool `json:"enabled"`
|
||||
}
|
||||
|
||||
type GettableServicesMetadata struct {
|
||||
Services []*ServiceMetadata `json:"services"`
|
||||
}
|
||||
|
||||
type Service struct {
|
||||
ServiceDefinition
|
||||
ServiceConfig *ServiceConfig `json:"serviceConfig"`
|
||||
}
|
||||
|
||||
type GettableService = Service
|
||||
|
||||
type UpdatableService struct {
|
||||
Config *ServiceConfig `json:"config"`
|
||||
}
|
||||
|
||||
type ServiceConfig struct {
|
||||
AWS *AWSServiceConfig `json:"aws,omitempty"`
|
||||
}
|
||||
|
||||
type AWSServiceConfig struct {
|
||||
Logs *AWSServiceLogsConfig `json:"logs"`
|
||||
Metrics *AWSServiceMetricsConfig `json:"metrics"`
|
||||
}
|
||||
|
||||
// AWSServiceLogsConfig is AWS specific logs config for a service
|
||||
// NOTE: the JSON keys are snake case for backward compatibility with existing agents.
|
||||
type AWSServiceLogsConfig struct {
|
||||
Enabled bool `json:"enabled"`
|
||||
S3Buckets map[string][]string `json:"s3_buckets,omitempty"`
|
||||
}
|
||||
|
||||
type AWSServiceMetricsConfig struct {
|
||||
Enabled bool `json:"enabled"`
|
||||
}
|
||||
|
||||
// ServiceDefinitionMetadata represents service definition metadata. This is useful for showing the services tab in the frontend.
|
||||
type ServiceDefinitionMetadata struct {
|
||||
Id string `json:"id"`
|
||||
Title string `json:"title"`
|
||||
Icon string `json:"icon"`
|
||||
}
|
||||
|
||||
type ServiceDefinition struct {
|
||||
ServiceDefinitionMetadata
|
||||
Overview string `json:"overview"` // markdown
|
||||
Assets Assets `json:"assets"`
|
||||
SupportedSignals SupportedSignals `json:"supported_signals"`
|
||||
DataCollected DataCollected `json:"dataCollected"`
|
||||
Strategy *CollectionStrategy `json:"telemetryCollectionStrategy"`
|
||||
}
|
||||
|
||||
// CollectionStrategy is cloud provider specific configuration for signal collection,
|
||||
// this is used by agent to understand the nitty-gritty for collecting telemetry for the cloud provider.
|
||||
type CollectionStrategy struct {
|
||||
AWS *AWSCollectionStrategy `json:"aws,omitempty"`
|
||||
}
|
||||
|
||||
// Assets represents the collection of dashboards.
|
||||
type Assets struct {
|
||||
Dashboards []Dashboard `json:"dashboards"`
|
||||
}
|
||||
|
||||
// SupportedSignals for cloud provider's service.
|
||||
type SupportedSignals struct {
|
||||
Logs bool `json:"logs"`
|
||||
Metrics bool `json:"metrics"`
|
||||
}
|
||||
|
||||
// DataCollected is a curated static list of metrics and logs; this is shown as part of the service overview.
|
||||
type DataCollected struct {
|
||||
Logs []CollectedLogAttribute `json:"logs"`
|
||||
Metrics []CollectedMetric `json:"metrics"`
|
||||
}
|
||||
|
||||
// CollectedLogAttribute represents a log attribute that is present in all log entries for a service,
|
||||
// this is shown as part of service overview.
|
||||
type CollectedLogAttribute struct {
|
||||
Name string `json:"name"`
|
||||
Path string `json:"path"`
|
||||
Type string `json:"type"`
|
||||
}
|
||||
|
||||
// CollectedMetric represents a metric that is collected for a service, this is shown as part of service overview.
|
||||
type CollectedMetric struct {
|
||||
Name string `json:"name"`
|
||||
Type string `json:"type"`
|
||||
Unit string `json:"unit"`
|
||||
Description string `json:"description"`
|
||||
}
|
||||
|
||||
// AWSCollectionStrategy represents signal collection strategy for AWS services.
// this is AWS specific.
// NOTE: this structure is still using snake case, for backward compatibility,
// with existing agents.
type AWSCollectionStrategy struct {
	Metrics   *AWSMetricsStrategy `json:"aws_metrics,omitempty"`
	Logs      *AWSLogsStrategy    `json:"aws_logs,omitempty"`
	S3Buckets map[string][]string `json:"s3_buckets,omitempty"` // Only available in S3 Sync Service Type in AWS
}

// AWSMetricsStrategy represents metrics collection strategy for AWS services.
// this is AWS specific.
// NOTE: this structure is still using snake case, for backward compatibility,
// with existing agents.
type AWSMetricsStrategy struct {
	// to be used as https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-metricstream.html#cfn-cloudwatch-metricstream-includefilters
	StreamFilters []struct {
		// json tags here are in the shape expected by AWS API as detailed at
		// https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-metricstream-metricstreamfilter.html
		Namespace   string   `json:"Namespace"`
		MetricNames []string `json:"MetricNames,omitempty"`
	} `json:"cloudwatch_metric_stream_filters"`
}

// AWSLogsStrategy represents logs collection strategy for AWS services.
// this is AWS specific.
// NOTE: this structure is still using snake case, for backward compatibility,
// with existing agents.
type AWSLogsStrategy struct {
	Subscriptions []struct {
		// subscribe to all logs groups with specified prefix.
		// eg: `/aws/rds/`
		LogGroupNamePrefix string `json:"log_group_name_prefix"`

		// https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/FilterAndPatternSyntax.html
		// "" implies no filtering is required.
		FilterPattern string `json:"filter_pattern"`
	} `json:"cloudwatch_logs_subscriptions"`
}

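A hedged sketch of the wire shape those snake_case tags produce; the namespace, metric names, and log group prefix are placeholders, and an encoding/json import is assumed:

// Illustrative payload only; values are not taken from any real service definition.
const exampleStrategyJSON = `{
  "aws_metrics": {
    "cloudwatch_metric_stream_filters": [
      {"Namespace": "AWS/RDS", "MetricNames": ["CPUUtilization"]}
    ]
  },
  "aws_logs": {
    "cloudwatch_logs_subscriptions": [
      {"log_group_name_prefix": "/aws/rds/", "filter_pattern": ""}
    ]
  }
}`

func parseExampleStrategy() (*AWSCollectionStrategy, error) {
	var s AWSCollectionStrategy
	if err := json.Unmarshal([]byte(exampleStrategyJSON), &s); err != nil {
		return nil, err
	}
	return &s, nil
}
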
// Dashboard represents a dashboard definition for cloud integration.
// This is used to show available pre-made dashboards for a service,
// hence has additional fields like id, title and description
type Dashboard struct {
	Id          string                               `json:"id"`
	Title       string                               `json:"title"`
	Description string                               `json:"description"`
	Definition  dashboardtypes.StorableDashboardData `json:"definition,omitempty"`
}

// SupportedServices is the map of supported services for each cloud provider.
var SupportedServices = map[CloudProviderType][]ServiceID{
	CloudProviderTypeAWS: {
		{valuer.NewString("alb")},
		{valuer.NewString("api-gateway")},
		{valuer.NewString("dynamodb")},
		{valuer.NewString("ec2")},
		{valuer.NewString("ecs")},
		{valuer.NewString("eks")},
		{valuer.NewString("elasticache")},
		{valuer.NewString("lambda")},
		{valuer.NewString("msk")},
		{valuer.NewString("rds")},
		{valuer.NewString("s3sync")},
		{valuer.NewString("sns")},
		{valuer.NewString("sqs")},
	},
}

// NewServiceID returns a new ServiceID from a string, validated against the supported services for the given cloud provider.
func NewServiceID(provider CloudProviderType, service string) (ServiceID, error) {
	services, ok := SupportedServices[provider]
	if !ok {
		return ServiceID{}, errors.NewInvalidInputf(ErrCodeInvalidServiceID, "no services defined for cloud provider: %s", provider)
	}
	for _, s := range services {
		if s.StringValue() == service {
			return s, nil
		}
	}
	return ServiceID{}, errors.NewInvalidInputf(ErrCodeInvalidServiceID, "invalid service id %q for cloud provider %s", service, provider)
}

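For reference, the validation above behaves roughly like this (a sketch, not part of the change); the service strings are placeholders:

svc, err := NewServiceID(CloudProviderTypeAWS, "rds") // ok: "rds" is in SupportedServices
if err != nil {
	return err
}
_, err = NewServiceID(CloudProviderTypeAWS, "not-a-service") // fails with ErrCodeInvalidServiceID
_ = svc
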
// UTILS

// GetCloudIntegrationDashboardID returns the dashboard id for a cloud integration, given the cloud provider, service id, and dashboard id.
// This is used to generate unique dashboard ids for cloud integration, and also to parse the dashboard id to get the cloud provider and service id when needed.
func GetCloudIntegrationDashboardID(cloudProvider CloudProviderType, svcId, dashboardId string) string {
	return fmt.Sprintf("cloud-integration--%s--%s--%s", cloudProvider, svcId, dashboardId)
}

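Assuming CloudProviderTypeAWS formats as "aws" (an assumption; the result depends on the provider's string value), the generated id looks like this, with placeholder service and dashboard ids:

id := GetCloudIntegrationDashboardID(CloudProviderTypeAWS, "rds", "overview")
// id == "cloud-integration--aws--rds--overview"
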
// GetDashboardsFromAssets returns the list of dashboards for the cloud provider service from definition.
func GetDashboardsFromAssets(
	svcId string,
	orgID valuer.UUID,
	cloudProvider CloudProviderType,
	createdAt time.Time,
	assets Assets,
) []*dashboardtypes.Dashboard {
	dashboards := make([]*dashboardtypes.Dashboard, 0)

	for _, d := range assets.Dashboards {
		author := fmt.Sprintf("%s-integration", cloudProvider)
		dashboards = append(dashboards, &dashboardtypes.Dashboard{
			ID:     GetCloudIntegrationDashboardID(cloudProvider, svcId, d.Id),
			Locked: true,
			OrgID:  orgID,
			Data:   d.Definition,
			TimeAuditable: types.TimeAuditable{
				CreatedAt: createdAt,
				UpdatedAt: createdAt,
			},
			UserAuditable: types.UserAuditable{
				CreatedBy: author,
				UpdatedBy: author,
			},
		})
	}

	return dashboards
}

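A minimal usage sketch, assuming svcDef is a loaded ServiceDefinition and orgID comes from the request context; neither appears in this diff:

dashboards := GetDashboardsFromAssets("rds", orgID, CloudProviderTypeAWS, time.Now(), svcDef.Assets)
for _, d := range dashboards {
	// Each dashboard is locked and attributed to the "<provider>-integration" author.
	_ = d.ID
}
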
41
pkg/types/cloudintegrationtypes/store.go
Normal file
41
pkg/types/cloudintegrationtypes/store.go
Normal file
@@ -0,0 +1,41 @@
package cloudintegrationtypes

import (
	"context"

	"github.com/SigNoz/signoz/pkg/valuer"
)

type Store interface {
	// GetAccountByID returns a cloud integration account by id
	GetAccountByID(ctx context.Context, orgID, id valuer.UUID, provider CloudProviderType) (*StorableCloudIntegration, error)

	// CreateAccount creates a new cloud integration account
	CreateAccount(ctx context.Context, account *StorableCloudIntegration) (*StorableCloudIntegration, error)

	// UpdateAccount updates an existing cloud integration account
	UpdateAccount(ctx context.Context, account *StorableCloudIntegration) error

	// RemoveAccount marks a cloud integration account as removed by setting the RemovedAt field
	RemoveAccount(ctx context.Context, orgID, id valuer.UUID, provider CloudProviderType) error

	// ListConnectedAccounts returns all the cloud integration accounts for the org and cloud provider
	ListConnectedAccounts(ctx context.Context, orgID valuer.UUID, provider CloudProviderType) ([]*StorableCloudIntegration, error)

	// GetConnectedAccount for a given provider
	GetConnectedAccount(ctx context.Context, orgID valuer.UUID, provider CloudProviderType, providerAccountID string) (*StorableCloudIntegration, error)

	// cloud_integration_service related methods

	// GetServiceByServiceID returns the cloud integration service for the given cloud integration id and service id
	GetServiceByServiceID(ctx context.Context, cloudIntegrationID valuer.UUID, serviceID ServiceID) (*StorableCloudIntegrationService, error)

	// CreateService creates a new cloud integration service
	CreateService(ctx context.Context, service *StorableCloudIntegrationService) (*StorableCloudIntegrationService, error)

	// UpdateService updates an existing cloud integration service
	UpdateService(ctx context.Context, service *StorableCloudIntegrationService) error

	// ListServices returns all the cloud integration services for the given cloud integration id
	ListServices(ctx context.Context, cloudIntegrationID valuer.UUID) ([]*StorableCloudIntegrationService, error)
}

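To show how a caller might drive this interface, here is a hedged sketch; it assumes StorableCloudIntegration exposes its id through the embedded Identifiable (not visible in this diff) and trims error handling to the minimum:

func listAWSIntegrationServices(ctx context.Context, store Store, orgID valuer.UUID) error {
	accounts, err := store.ListConnectedAccounts(ctx, orgID, CloudProviderTypeAWS)
	if err != nil {
		return err
	}
	for _, account := range accounts {
		// account.ID assumed to come from the embedded Identifiable.
		services, err := store.ListServices(ctx, account.ID)
		if err != nil {
			return err
		}
		_ = services // render or aggregate per account as needed
	}
	return nil
}
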
@@ -6,6 +6,7 @@ import (
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/open-telemetry/opamp-go/protobufs"
	"github.com/uptrace/bun"
)

@@ -17,6 +18,15 @@ const (
	AgentStatusDisconnected
)

var DeployStatusToProtoStatus = map[DeployStatus]protobufs.RemoteConfigStatuses{
	PendingDeploy:       protobufs.RemoteConfigStatuses_RemoteConfigStatuses_UNSET,
	Deploying:           protobufs.RemoteConfigStatuses_RemoteConfigStatuses_APPLYING,
	Deployed:            protobufs.RemoteConfigStatuses_RemoteConfigStatuses_APPLIED,
	DeployInitiated:     protobufs.RemoteConfigStatuses_RemoteConfigStatuses_APPLYING,
	DeployFailed:        protobufs.RemoteConfigStatuses_RemoteConfigStatuses_FAILED,
	DeployStatusUnknown: protobufs.RemoteConfigStatuses_RemoteConfigStatuses_UNSET,
}

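A small sketch of how this table would typically be consumed when reporting remote-config status over OpAMP; the UNSET fallback for statuses missing from the map is an assumption, not something the diff establishes:

func protoStatusFor(status DeployStatus) protobufs.RemoteConfigStatuses {
	if proto, ok := DeployStatusToProtoStatus[status]; ok {
		return proto
	}
	// Assumed fallback for statuses not present in the map.
	return protobufs.RemoteConfigStatuses_RemoteConfigStatuses_UNSET
}
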
type StorableAgent struct {
	bun.BaseModel `bun:"table:agent"`

@@ -30,16 +40,6 @@ type StorableAgent struct {
	Config string `bun:"config,type:text,notnull"`
}

func NewStorableAgent(store sqlstore.SQLStore, orgID valuer.UUID, agentID string, status AgentStatus) StorableAgent {
	return StorableAgent{
		OrgID:         orgID,
		Identifiable:  types.Identifiable{ID: valuer.GenerateUUID()},
		AgentID:       agentID,
		TimeAuditable: types.TimeAuditable{CreatedAt: time.Now(), UpdatedAt: time.Now()},
		Status:        status,
	}
}

type ElementType struct{ valuer.String }

var (
@@ -49,24 +49,6 @@ var (
	ElementTypeLbExporter = ElementType{valuer.NewString("lb_exporter")}
)

// NewElementType creates a new ElementType from a string value.
// Returns the corresponding ElementType constant if the string matches,
// otherwise returns an empty ElementType.
func NewElementType(value string) ElementType {
	switch valuer.NewString(value) {
	case ElementTypeSamplingRules.String:
		return ElementTypeSamplingRules
	case ElementTypeDropRules.String:
		return ElementTypeDropRules
	case ElementTypeLogPipelines.String:
		return ElementTypeLogPipelines
	case ElementTypeLbExporter.String:
		return ElementTypeLbExporter
	default:
		return ElementType{valuer.NewString("")}
	}
}

type DeployStatus struct{ valuer.String }

var (
@@ -98,6 +80,26 @@ type AgentConfigVersion struct {
	Config string `json:"config" bun:"config,type:text"`
}

type AgentConfigElement struct {
	bun.BaseModel `bun:"table:agent_config_element"`

	types.Identifiable
	types.TimeAuditable
	ElementID   string      `bun:"element_id,type:text,notnull,unique:element_type_version_idx"`
	ElementType string      `bun:"element_type,type:text,notnull,unique:element_type_version_idx"`
	VersionID   valuer.UUID `bun:"version_id,type:text,notnull,unique:element_type_version_idx"`
}

func NewStorableAgent(store sqlstore.SQLStore, orgID valuer.UUID, agentID string, status AgentStatus) StorableAgent {
	return StorableAgent{
		OrgID:         orgID,
		Identifiable:  types.Identifiable{ID: valuer.GenerateUUID()},
		AgentID:       agentID,
		TimeAuditable: types.TimeAuditable{CreatedAt: time.Now(), UpdatedAt: time.Now()},
		Status:        status,
	}
}

func NewAgentConfigVersion(orgId valuer.UUID, userId valuer.UUID, elementType ElementType) *AgentConfigVersion {
	return &AgentConfigVersion{
		TimeAuditable: types.TimeAuditable{

@@ -118,12 +120,20 @@ func (a *AgentConfigVersion) IncrementVersion(lastVersion int) {
	a.Version = lastVersion + 1
}

type AgentConfigElement struct {
	bun.BaseModel `bun:"table:agent_config_element"`

	types.Identifiable
	types.TimeAuditable
	ElementID   string      `bun:"element_id,type:text,notnull,unique:element_type_version_idx"`
	ElementType string      `bun:"element_type,type:text,notnull,unique:element_type_version_idx"`
	VersionID   valuer.UUID `bun:"version_id,type:text,notnull,unique:element_type_version_idx"`
// NewElementType creates a new ElementType from a string value.
// Returns the corresponding ElementType constant if the string matches,
// otherwise returns an empty ElementType.
func NewElementType(value string) ElementType {
	switch valuer.NewString(value) {
	case ElementTypeSamplingRules.String:
		return ElementTypeSamplingRules
	case ElementTypeDropRules.String:
		return ElementTypeDropRules
	case ElementTypeLogPipelines.String:
		return ElementTypeLogPipelines
	case ElementTypeLbExporter.String:
		return ElementTypeLbExporter
	default:
		return ElementType{valuer.NewString("")}
	}
}

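A quick sketch of the parse behaviour of NewElementType; the input strings are placeholders:

et := NewElementType("log_pipelines") // returns ElementTypeLogPipelines
unknown := NewElementType("something-else")
if unknown.StringValue() == "" {
	// Unrecognised values come back as the empty ElementType.
}
_ = et
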
@@ -1,19 +0,0 @@
package roletypes

import (
	"context"

	"github.com/SigNoz/signoz/pkg/valuer"
)

type Store interface {
	Create(context.Context, *StorableRole) error
	Get(context.Context, valuer.UUID, valuer.UUID) (*StorableRole, error)
	GetByOrgIDAndName(context.Context, valuer.UUID, string) (*StorableRole, error)
	List(context.Context, valuer.UUID) ([]*StorableRole, error)
	ListByOrgIDAndNames(context.Context, valuer.UUID, []string) ([]*StorableRole, error)
	ListByOrgIDAndIDs(context.Context, valuer.UUID, []valuer.UUID) ([]*StorableRole, error)
	Update(context.Context, valuer.UUID, *StorableRole) error
	Delete(context.Context, valuer.UUID, valuer.UUID) error
	RunInTx(context.Context, func(ctx context.Context) error) error
}

@@ -9,7 +9,7 @@ import (

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/roletypes"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/uptrace/bun"
)
@@ -102,10 +102,10 @@ func NewServiceAccountFromStorables(storableServiceAccount *StorableServiceAccou
	}
}

func NewServiceAccountsFromRoles(storableServiceAccounts []*StorableServiceAccount, roles []*roletypes.Role, serviceAccountIDToRoleIDsMap map[string][]valuer.UUID) []*ServiceAccount {
func NewServiceAccountsFromRoles(storableServiceAccounts []*StorableServiceAccount, roles []*authtypes.Role, serviceAccountIDToRoleIDsMap map[string][]valuer.UUID) []*ServiceAccount {
	serviceAccounts := make([]*ServiceAccount, 0, len(storableServiceAccounts))

	roleIDToRole := make(map[string]*roletypes.Role, len(roles))
	roleIDToRole := make(map[string]*authtypes.Role, len(roles))
	for _, role := range roles {
		roleIDToRole[role.ID.String()] = role
	}

@@ -5,7 +5,7 @@ import (

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/roletypes"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/uptrace/bun"
)
@@ -19,7 +19,7 @@ type StorableServiceAccountRole struct {
	RoleID string `bun:"role_id"`
}

func NewStorableServiceAccountRoles(serviceAccountID valuer.UUID, roles []*roletypes.Role) []*StorableServiceAccountRole {
func NewStorableServiceAccountRoles(serviceAccountID valuer.UUID, roles []*authtypes.Role) []*StorableServiceAccountRole {
	storableServiceAccountRoles := make([]*StorableServiceAccountRole, len(roles))
	for idx, role := range roles {
		storableServiceAccountRoles[idx] = &StorableServiceAccountRole{
@@ -38,7 +38,7 @@ func NewStorableServiceAccountRoles(serviceAccountID valuer.UUID, roles []*rolet
	return storableServiceAccountRoles
}

func NewRolesFromStorableServiceAccountRoles(storable []*StorableServiceAccountRole, roles []*roletypes.Role) ([]string, error) {
func NewRolesFromStorableServiceAccountRoles(storable []*StorableServiceAccountRole, roles []*authtypes.Role) ([]string, error) {
	roleIDToName := make(map[string]string, len(roles))
	for _, role := range roles {
		roleIDToName[role.ID.String()] = role.Name

@@ -45,6 +45,8 @@ type MetadataStore interface {

	// GetFirstSeenFromMetricMetadata gets the first seen timestamp for a metric metadata lookup key.
	GetFirstSeenFromMetricMetadata(ctx context.Context, lookupKeys []MetricMetadataLookupKey) (map[MetricMetadataLookupKey]int64, error)

	FetchLastSeenInfoMulti(ctx context.Context, metricNames ...string) (map[string]int64, error)
}

type MetricMetadataLookupKey struct {

@@ -342,3 +342,7 @@ func (m *MockMetadataStore) SetFirstSeenFromMetricMetadata(firstSeenMap map[tele
		m.LookupKeysMap[key] = value
	}
}

func (m *MockMetadataStore) FetchLastSeenInfoMulti(ctx context.Context, metricNames ...string) (map[string]int64, error) {
	return make(map[string]int64), nil
}

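A hedged sketch of how the new FetchLastSeenInfoMulti method might be called; the metric names are placeholders and the exact timestamp unit (likely epoch milliseconds) is an assumption:

lastSeen, err := store.FetchLastSeenInfoMulti(ctx, "container.cpu.time", "http.server.duration")
if err != nil {
	return err
}
// Map is keyed by metric name; a missing key would mean no last-seen info for that metric.
_ = lastSeen["container.cpu.time"]
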
@@ -1,3 +1,4 @@
from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable, List

@@ -7,6 +8,7 @@ from sqlalchemy import sql
from wiremock.resources.mappings import Mapping

from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD, add_license
from fixtures.metrics import Metrics
from fixtures.types import Operation, SigNoz, TestContainerDocker


@@ -74,9 +76,37 @@ def test_public_dashboard_widget_query_range(
    signoz: SigNoz,
    create_user_admin: Operation,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
):
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # Insert metric data so the widget query returns results instead of 404
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    metrics: List[Metrics] = [
        Metrics(
            metric_name="container.cpu.time",
            labels={"service": "test-service"},
            timestamp=now - timedelta(minutes=5),
            value=100.0,
            temporality="Cumulative",
        ),
        Metrics(
            metric_name="container.cpu.time",
            labels={"service": "test-service"},
            timestamp=now - timedelta(minutes=3),
            value=200.0,
            temporality="Cumulative",
        ),
        Metrics(
            metric_name="container.cpu.time",
            labels={"service": "test-service"},
            timestamp=now - timedelta(minutes=1),
            value=300.0,
            temporality="Cumulative",
        ),
    ]
    insert_metrics(metrics)

    dashboard_req = {
        "title": "Test Widget Query Range Dashboard",
        "description": "For testing widget query range",

@@ -10,12 +10,16 @@ from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.metrics import Metrics
from fixtures.querier import (
    assert_minutely_bucket_values,
    build_builder_query,
    find_named_result,
    index_series_by_label,
    make_query_request,
)
from fixtures.utils import get_testdata_file_path

FILL_GAPS = "fillGaps"
FILL_ZERO = "fillZero"
HISTOGRAM_FILE = get_testdata_file_path("histogram_data_1h.jsonl")


def _build_format_options(fill_mode: str) -> Dict[str, Any]:
@@ -580,3 +584,39 @@ def test_metrics_fill_formula_with_group_by(
            expected_by_ts=expectations[group],
            context=f"metrics/{fill_mode}/F1/{group}",
        )

def test_histogram_p90_returns_404_outside_data_window(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
) -> None:

    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    metric_name = "test_p90_last_seen_bucket"

    metrics = Metrics.load_from_file(
        HISTOGRAM_FILE,
        base_time=now - timedelta(minutes=90),
        metric_name_override=metric_name,
    )
    insert_metrics(metrics)

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    query = build_builder_query(
        "A",
        metric_name,
        "doesnotreallymatter",
        "p90",
    )

    end_ms = int(now.timestamp() * 1000)

    start_2h = int((now - timedelta(hours=2)).timestamp() * 1000)
    response = make_query_request(signoz, token, start_2h, end_ms, [query])
    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    start_15m = int((now - timedelta(minutes=15)).timestamp() * 1000)
    response = make_query_request(signoz, token, start_15m, end_ms, [query])
    assert response.status_code == HTTPStatus.NOT_FOUND